//go:build go1.17 && !go1.22
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/cpu`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`

    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/rt`
)

/** Register Allocations
 *
 *  State Registers:
 *
 *      %r15 : stack base
 *      %rdi : result pointer
 *      %rsi : result length
 *      %rdx : result capacity
 *      %r10 : sp->p
 *      %r11 : sp->q
 *      %r12 : sp->x
 *      %r13 : sp->f
 *
 *  Error Registers:
 *
 *      %rax : error type register
 *      %rbx : error pointer register
 */

/** Function Prototype & Stack Map
 *
 *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
 *
 *  buf    :   (FP)
 *  p      :  8(FP)
 *  sb     : 16(FP)
 *  fv     : 24(FP)
 *  err.vt : 32(FP)
 *  err.vp : 40(FP)
 */

const (
    _S_cond = iota
    _S_init
)

const (
    _FP_args   = 32     // 32 bytes for spill registers of arguments
    _FP_fargs  = 40     // 40 bytes for passing arguments to other Go functions
    _FP_saves  = 64     // 64 bytes for saving the registers before CALL instructions
    _FP_locals = 24     // 24 bytes for local variables
)

const (
    _FP_loffs = _FP_fargs + _FP_saves
    _FP_offs  = _FP_loffs + _FP_locals
    // _FP_offs  = _FP_loffs + _FP_locals + _FP_debug
    _FP_size  = _FP_offs + 8     // 8 bytes for the parent frame pointer
    _FP_base  = _FP_size + 8     // 8 bytes for the return address
)
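
// For reference, the constants above evaluate to the following frame layout
// (all offsets from SP): locals start at _FP_loffs = 40 + 64 = 104, the saved
// BP slot sits at _FP_offs = 104 + 24 = 128, the frame is _FP_size = 136
// bytes, and the spilled arguments start at _FP_base = 144, right past the
// return address.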

const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)

const (
    _IM_null   = 0x6c6c756e           // 'null'
    _IM_true   = 0x65757274           // 'true'
    _IM_fals   = 0x736c6166           // 'fals' ('false' without the 'e')
    _IM_open   = 0x00225c22           // '"\"∅'
    _IM_array  = 0x5d5b               // '[]'
    _IM_object = 0x7d7b               // '{}'
    _IM_mulv   = -0x5555555555555555
)
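
// The _IM_* immediates are the token bytes packed in little-endian order, so
// a single MOVL/MOVW can store a whole token at once: 0x6c6c756e is
// 'n','u','l','l' and 0x5d5b is '[',']'.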

const (
    _LB_more_space        = "_more_space"
    _LB_more_space_return = "_more_space_return_"
)

const (
    _LB_error                 = "_error"
    _LB_error_too_deep        = "_error_too_deep"
    _LB_error_invalid_number  = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic                 = "_panic"
)

var (
    _AX = jit.Reg("AX")
    _BX = jit.Reg("BX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
)

var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

var (
    _ST = jit.Reg("R15")     // can't use R14 since it's always scratched by Go...
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

var (
    _LR = jit.Reg("R9")
    _ET = jit.Reg("AX")
    _EP = jit.Reg("BX")
)

var (
    _SP_p = jit.Reg("R10")   // saved in BX during call_c
    _SP_q = jit.Reg("R11")   // saved in BP during call_c
    _SP_x = jit.Reg("R12")
    _SP_f = jit.Reg("R13")
)

var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
)

var (
    _RET_et = _ET
    _RET_ep = _EP
)

var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
)

var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC, _SP_q}
    _REG_b64 = []obj.Addr{_SP_p, _SP_q}

    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
    _REG_ms  = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
)

type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    x    int
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}
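
// A minimal usage sketch (the variable names here are illustrative only):
//
//      enc := newAssembler(prog).Load()   // assemble `prog` into native code
//      err := enc(&buf, unsafe.Pointer(&val), &stack, 0)
//
// Load() assembles the program once and returns it as a callable _Encoder
// with the prototype documented at the top of this file.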

/** Assembler Interface **/

func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}

/** Assembler Stages **/

var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}

func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)
    self.Emit("XORL", _EP, _EP)
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _CX)                 // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))         // MOVQ RL, 8(CX)
    self.Emit("MOVQ", jit.Imm(0), _ARG_rb)          // MOVQ $0, rb<>+0(FP)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)          // MOVQ $0, vp<>+8(FP)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sb)          // MOVQ $0, sb<>+16(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
    self.Emit("RET")                                // RET
}

func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
    self.Emit("MOVQ", _AX, _ARG_rb)                 // MOVQ AX, rb<>+0(FP)
    self.Emit("MOVQ", _BX, _ARG_vp)                 // MOVQ BX, vp<>+8(FP)
    self.Emit("MOVQ", _CX, _ARG_sb)                 // MOVQ CX, sb<>+16(FP)
    self.Emit("MOVQ", _DI, _ARG_fv)                 // MOVQ DI, fv<>+24(FP)
    self.Emit("MOVQ", jit.Ptr(_AX,  0), _RP)        // MOVQ (AX), DI
    self.Emit("MOVQ", jit.Ptr(_AX,  8), _RL)        // MOVQ 8(AX), SI
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)        // MOVQ 16(AX), DX
    self.Emit("MOVQ", _BX, _SP_p)                   // MOVQ BX, R10
    self.Emit("MOVQ", _CX, _ST)                     // MOVQ CX, R15
    self.Emit("XORL", _SP_x, _SP_x)                 // XORL R12, R12
    self.Emit("XORL", _SP_f, _SP_f)                 // XORL R13, R13
    self.Emit("XORL", _SP_q, _SP_q)                 // XORL R11, R11
}
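
// Under the Go 1.17+ register-based ABI the first integer arguments arrive in
// AX, BX, CX and DI, which is why the prologue spills exactly those four
// registers into the rb / vp / sb / fv argument slots before loading the
// buffer header.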

/** Assembler Inline Functions **/

func (self *_Assembler) xsave(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
        }
    }
}

func (self *_Assembler) xload(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
        }
    }
}

func (self *_Assembler) rbuf_di() {
    if _RP.Reg != x86.REG_DI {
        panic("register allocation messed up: RP != DI")
    } else {
        self.Emit("ADDQ", _RL, _RP)
    }
}

func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                           // SAVE   $C_regs
    self.rbuf_di()                          // MOVQ   RP, DI
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)  // $ins   (SP.p), SI
    self.call_c(fn)                         // CALL_C $fn
    self.Emit("ADDQ", _AX, _RL)             // ADDQ   AX, RL
}
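
// `nd` is an upper bound on the number of bytes the native itoa routine may
// write, reserved up front so the formatted integer never overruns the buffer.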

func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte stores */
    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)        // MOVQ $s[i:], AX
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))  // MOVQ AX, i(RP)(RL)
        i += 8
    }

    /* 4-byte stores */
    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVL $s[i:], i(RP)(RL)
        i += 4
    }

    /* 2-byte stores */
    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVW $s[i:], i(RP)(RL)
        i += 2
    }

    /* last byte */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))     // MOVB $s[i:], i(RP)(RL)
    }
}
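
// For example, store_str("true") compiles to a single 4-byte MOVL of
// 0x65757274, while a 10-byte string becomes one MOVQ plus one MOVW.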

func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}

func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}

func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the following code relies on LR == R9 to work */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)       // LEAQ $v, AX
    self.Emit("CMPQ", _AX, _RC)     // CMPQ AX, RC
    self.Sjmp("JBE" , key)          // JBE  _more_space_return_{n}
    self.slice_grow_ax(key)         // GROW $key
    self.Link(key)                  // _more_space_return_{n}:
}

func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ ?(PC), R9
    self.Sref(ret, 4)                   // .... &ret
    self.Sjmp("JMP" , _LB_more_space)   // JMP  _more_space
}
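
// Together, check_size_rl and slice_grow_ax form an inline capacity check:
// the common case falls through at the CMPQ/JBE, while the grow path loads
// the per-site return label into R9 (LR) and jumps to the shared _more_space
// stub, which grows the buffer and jumps back through LR.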

/** State Stack Helpers **/

const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)

func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9)    // LEAQ _StateSize(CX), R9
    self.Emit("CMPQ", _R9, jit.Imm(_StackLimit))        // CMPQ R9, $_StackLimit
    self.Sjmp("JAE" , _LB_error_too_deep)               // JAE  _error_too_deep
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))   // MOVQ SP.x, 8(ST)(CX)
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))  // MOVQ SP.f, 16(ST)(CX)
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))   // MOVQ SP.p, 24(ST)(CX)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))   // MOVQ SP.q, 32(ST)(CX)
    self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0))             // MOVQ R9, (ST)
}

func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)                // MOVQ  (ST), AX
    self.Emit("SUBQ" , jit.Imm(decr), _AX)                  // SUBQ  $decr, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))                // MOVQ  AX, (ST)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)      // MOVQ  8(ST)(AX), SP.x
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)     // MOVQ  16(ST)(AX), SP.f
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)     // MOVQ  24(ST)(AX), SP.p
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)     // MOVQ  32(ST)(AX), SP.q
    self.Emit("PXOR" , _X0, _X0)                            // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))        // MOVOU X0, 8(ST)(AX)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))       // MOVOU X0, 24(ST)(AX)
}
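
// The trailing MOVOU stores zero the vacated slots so that the stale SP.p /
// SP.q pointers saved there cannot keep dead objects reachable through the
// stack map.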

/** Buffer Helpers **/

func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVB $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}

func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVL $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(n), _RL)                              // ADDQ $n, RL
}

func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)                                  // TEXT $ss
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)     // ADDQ ${len(ss)}, RL
}

// get *buf at AX
func (self *_Assembler) prep_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))     // MOVQ RL, 8(AX)
}

func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)             // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RP, jit.Ptr(_CX,  0))    // MOVQ RP, (CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX,  8))    // MOVQ RL, 8(CX)
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))    // MOVQ RC, 16(CX)
}

// reload RP / RL / RC from *buf via AX
func (self *_Assembler) load_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", jit.Ptr(_AX,  0), _RP)    // MOVQ (AX), RP
    self.Emit("MOVQ", jit.Ptr(_AX,  8), _RL)    // MOVQ 8(AX), RL
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)    // MOVQ 16(AX), RC
}

/** Function Interface Helpers **/

func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _LR)  // MOVQ $pc, R9
    self.Rjmp("CALL", _LR)      // CALL R9
}

func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)     // SAVE $REG_ffi
}

func (self *_Assembler) call_b64(pc obj.Addr) {
    self.xsave(_REG_b64...)     // SAVE $REG_b64
    self.call(pc)               // CALL $pc
    self.xload(_REG_b64...)     // LOAD $REG_b64
}

func (self *_Assembler) call_c(pc obj.Addr) {
    self.Emit("XCHGQ", _SP_p, _BX)  // XCHGQ SP.p, BX
    self.call(pc)                   // CALL  $pc
    self.xload(_REG_ffi...)         // LOAD  $REG_ffi
    self.Emit("XCHGQ", _SP_p, _BX)  // XCHGQ SP.p, BX
}
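
// call_c parks SP.p in BX across the call because R10 and R11 are scratch
// registers in the C calling convention (see the _SP_p / _SP_q declarations
// above); everything else clobbered by the FFI is restored from the xsave
// slots via _REG_ffi.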

func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)     // SAVE $REG_all
    self.call(pc)               // CALL $pc
    self.xload(_REG_all...)     // LOAD $REG_all
}

func (self *_Assembler) call_more_space(pc obj.Addr) {
    self.xsave(_REG_ms...)      // SAVE $REG_ms
    self.call(pc)               // CALL $pc
    self.xload(_REG_ms...)      // LOAD $REG_ms
}

func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)     // SAVE $REG_enc
    self.call(pc)               // CALL $pc
    self.xload(_REG_enc...)     // LOAD $REG_enc
}

func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Interface        : self.call_marshaler_i(fn, it)
        case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
        // a struct or array wrapping a single direct-interface type can itself be stored directly
        default                       : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}

func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)                      // MOVQ    (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ   AX, AX
    self.Sjmp("JZ"   , "_null_{n}")                                 // JZ      _null_{n}
    self.Emit("MOVQ" , _AX, _BX)                                    // MOVQ    AX, BX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)                      // MOVQ    8(SP.p), CX
    self.Emit("MOVQ" , jit.Gtype(it), _AX)                          // MOVQ    $it, AX
    self.call_go(_F_assertI2I)                                      // CALL_GO assertI2I
    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ   AX, AX
    self.Sjmp("JZ"   , "_null_{n}")                                 // JZ      _null_{n}
    self.Emit("MOVQ" , _BX, _CX)                                    // MOVQ    BX, CX
    self.Emit("MOVQ" , _AX, _BX)                                    // MOVQ    AX, BX
    self.prep_buffer_AX()                                           // MOVE    {buf}, AX
    self.Emit("MOVQ" , _ARG_fv, _DI)                                // MOVQ    ARG.fv, DI
    self.call_go(fn)                                                // CALL_GO $fn
    self.Emit("TESTQ", _ET, _ET)                                    // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)                                   // JNZ     _error
    self.load_buffer_AX()                                           // LOAD    {buf}
    self.Sjmp("JMP"  , "_done_{n}")                                 // JMP     _done_{n}
    self.Link("_null_{n}")                                          // _null_{n}:
    self.check_size(4)                                              // SIZE    $4
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))   // MOVL    $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ    $4, RL
    self.Link("_done_{n}")                                          // _done_{n}:
}

func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer_AX()                           // MOVE {buf}, AX
    self.Emit("MOVQ", jit.Itab(it, vt), _BX)        // MOVQ $(itab(it, vt)), BX

    /* dereference the pointer if needed */
    if !deref {
        self.Emit("MOVQ", _SP_p, _CX)               // MOVQ SP.p, CX
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)   // MOVQ (SP.p), CX
    }

    /* call the encoder, and perform error checks */
    self.Emit("MOVQ" , _ARG_fv, _DI)                // MOVQ  ARG.fv, DI
    self.call_go(fn)                                // CALL  $fn
    self.Emit("TESTQ", _ET, _ET)                    // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)                   // JNZ   _error
    self.load_buffer_AX()                           // LOAD  {buf}
}

/** Builtin: _more_space **/

var (
    _T_byte      = jit.Type(byteType)
    _F_growslice = jit.Func(growslice)
)

// the caller must pass the required total length in AX
func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, _BX)             // MOVQ RP, BX
    self.Emit("MOVQ", _RL, _CX)             // MOVQ RL, CX
    self.Emit("MOVQ", _RC, _DI)             // MOVQ RC, DI
    self.Emit("MOVQ", _AX, _SI)             // MOVQ AX, SI
    self.Emit("MOVQ", _T_byte, _AX)         // MOVQ $_T_byte, AX
    self.call_more_space(_F_growslice)      // CALL growslice
    self.Emit("MOVQ", _AX, _RP)             // MOVQ AX, RP
    self.Emit("MOVQ", _BX, _RL)             // MOVQ BX, RL
    self.Emit("MOVQ", _CX, _RC)             // MOVQ CX, RC
    self.save_buffer()                      // SAVE {buf}
    self.Rjmp("JMP" , _LR)                  // JMP  LR
}
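
// A sketch of the register shuffle above, assuming the runtime.growslice
// signature for this Go range, func growslice(et *_type, old slice, cap int)
// slice: the old header RP/RL/RC goes into BX/CX/DI, the required length into
// SI, and the element type into AX; the grown header comes back in AX/BX/CX.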

/** Builtin Errors **/

var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)

func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)                 // MOVQ $_V_ERR_too_deep, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
}

func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)  // MOVQ    (SP.p), AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX)  // MOVQ    8(SP.p), BX
    self.call_go(_F_error_number)              // CALL_GO error_number
    self.Sjmp("JMP" , _LB_error)               // JMP     _error
}

func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)          // MOVQ $_V_ERR_nan_or_infinite, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
}

/** String Encoding Routine **/

var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, _BX)
    self.call_go(_F_panic)
}

func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)  // MOVQ  8(SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_str_empty_{n}")        // JZ    _str_empty_{n}
    self.Emit("CMPQ" , jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE"  , "_str_next_{n}")
    self.Emit("MOVQ" , jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP"  , _LB_panic)
    self.Link("_str_next_{n}")

    /* opening quote, check for double quote */
    if !doubleQuote {
        self.check_size_r(_AX, 2)   // SIZE $2
        self.add_char('"')          // CHAR $'"'
    } else {
        self.check_size_r(_AX, 6)   // SIZE $6
        self.add_long(_IM_open, 3)  // TEXT $`"\"`
    }

    /* quoting loop */
    self.Emit("XORL", _AX, _AX)         // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_sp)     // MOVQ AX, sp
    self.Link("_str_loop_{n}")          // _str_loop_{n}:
    self.save_c()                       // SAVE $REG_ffi

    /* load the output buffer first, and then input buffer,
     * because the parameter registers collide with RP / RL / RC */
    self.Emit("MOVQ", _RC, _CX)                         // MOVQ RC, CX
    self.Emit("SUBQ", _RL, _CX)                         // SUBQ RL, CX
    self.Emit("MOVQ", _CX, _VAR_dn)                     // MOVQ CX, dn
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX)     // LEAQ (RP)(RL), DX
    self.Emit("LEAQ", _VAR_dn, _CX)                     // LEAQ dn, CX
    self.Emit("MOVQ", _VAR_sp, _AX)                     // MOVQ sp, AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)           // MOVQ (SP.p), DI
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)           // MOVQ 8(SP.p), SI
    self.Emit("ADDQ", _AX, _DI)                         // ADDQ AX, DI
    self.Emit("SUBQ", _AX, _SI)                         // SUBQ AX, SI

    /* set the flags based on `doubleQuote` */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)                                 // XORL R8, R8
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)     // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    }

    /* call the native quoter */
    self.call_c(_F_quote)                   // CALL  quote
    self.Emit("ADDQ" , _VAR_dn, _RL)        // ADDQ  dn, RL

    self.Emit("TESTQ", _AX, _AX)            // TESTQ AX, AX
    self.Sjmp("JS"   , "_str_space_{n}")    // JS    _str_space_{n}

    /* close the string, check for double quote */
    if !doubleQuote {
        self.check_size(1)                  // SIZE $1
        self.add_char('"')                  // CHAR $'"'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP  _str_end_{n}
    } else {
        self.check_size(3)                  // SIZE $3
        self.add_text("\\\"\"")             // TEXT $'\""'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP  _str_end_{n}
    }

    /* not enough space to contain the quoted string */
    self.Link("_str_space_{n}")                         // _str_space_{n}:
    self.Emit("NOTQ", _AX)                              // NOTQ AX
    self.Emit("ADDQ", _AX, _VAR_sp)                     // ADDQ AX, sp
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX)     // LEAQ (RC)(RC), AX
    self.slice_grow_ax("_str_loop_{n}")                 // GROW _str_loop_{n}

    /* empty string, check for double quote */
    if !doubleQuote {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(2)              // SIZE $2
        self.add_text("\"\"")           // TEXT $'""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    } else {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(6)              // SIZE $6
        self.add_text("\"\\\"\\\"\"")   // TEXT $'"\"\""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    }
}
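
// When the native quoter runs out of output space it returns the bitwise
// complement of the number of source bytes it consumed; _str_space_{n}
// recovers that count with NOTQ, advances the saved source offset, doubles
// the buffer (LEAQ (RC)(RC), AX) and retries from _str_loop_{n}.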

/** OpCode Assembler Functions **/

var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

const (
    _MODE_AVX2 = 1 << 2
)

func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}

func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))  // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                             // ADDQ $4, RL
}

func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))                // CMPB (SP.p), $0
    self.Sjmp("JE"  , "_false_{n}")                                 // JE   _false_{n}
    self.check_size(4)                                              // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'true', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ $4, RL
    self.Sjmp("JMP" , "_end_{n}")                                   // JMP  _end_{n}
    self.Link("_false_{n}")                                         // _false_{n}:
    self.check_size(5)                                              // SIZE $5
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'fals', (RP)(RL*1)
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))        // MOVB $'e', 4(RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(5), _RL)                              // ADDQ $5, RL
    self.Link("_end_{n}")                                           // _end_{n}:
}

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL"    , jit.Ptr(_SP_p, 0), _AX)       // MOVL     (SP.p), AX
    self.Emit("ANDL"    , jit.Imm(_FM_exp32), _AX)      // ANDL     $_FM_exp32, AX
    self.Emit("XORL"    , jit.Imm(_FM_exp32), _AX)      // XORL     $_FM_exp32, AX
    self.Sjmp("JZ"      , _LB_error_nan_or_infinite)    // JZ       _error_nan_or_infinite
    self.save_c()                                       // SAVE     $C_regs
    self.rbuf_di()                                      // MOVQ     RP, DI
    self.Emit("MOVSS"   , jit.Ptr(_SP_p, 0), _X0)       // MOVSS    (SP.p), X0
    self.call_c(_F_f32toa)                              // CALL_C   f32toa
    self.Emit("ADDQ"    , _AX, _RL)                     // ADDQ     AX, RL
}

func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ"  , jit.Ptr(_SP_p, 0), _AX)     // MOVQ   (SP.p), AX
    self.Emit("MOVQ"  , jit.Imm(_FM_exp64), _CX)    // MOVQ   $_FM_exp64, CX
    self.Emit("ANDQ"  , _CX, _AX)                   // ANDQ   CX, AX
    self.Emit("XORQ"  , _CX, _AX)                   // XORQ   CX, AX
    self.Sjmp("JZ"    , _LB_error_nan_or_infinite)  // JZ     _error_nan_or_infinite
    self.save_c()                                   // SAVE   $C_regs
    self.rbuf_di()                                  // MOVQ   RP, DI
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)     // MOVSD  (SP.p), X0
    self.call_c(_F_f64toa)                          // CALL_C f64toa
    self.Emit("ADDQ"  , _AX, _RL)                   // ADDQ   AX, RL
}

func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)           // MOVQ 8(SP.p), AX
    self.Emit("ADDQ", jit.Imm(2), _AX)                  // ADDQ $2, AX
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)           // MOVQ $_IM_mulv, CX
    self.Emit("MOVQ", _DX, _BX)                         // MOVQ DX, BX
    self.From("MULQ", _CX)                              // MULQ CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)     // LEAQ 1(DX)(DX), AX
    self.Emit("ORQ" , jit.Imm(2), _AX)                  // ORQ  $2, AX
    self.Emit("MOVQ", _BX, _DX)                         // MOVQ BX, DX
    self.check_size_r(_AX, 0)                           // SIZE AX
    self.add_char('"')                                  // CHAR $'"'
    self.Emit("MOVQ", _ARG_rb, _DI)                     // MOVQ rb<>+0(FP), DI
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))             // MOVQ RL, 8(DI)
    self.Emit("MOVQ", _SP_p, _SI)                       // MOVQ SP.p, SI

    /* check for AVX2 support */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)                     // XORL DX, DX
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)     // MOVL $_MODE_AVX2, DX
    }

    /* call the encoder */
    self.call_b64(_F_b64encode)     // CALL b64encode
    self.load_buffer_AX()           // LOAD {buf}
    self.add_char('"')              // CHAR $'"'
}

func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}

func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)          // MOVQ    8(SP.p), BX
    self.Emit("TESTQ", _BX, _BX)                        // TESTQ   BX, BX
    self.Sjmp("JZ"   , "_empty_{n}")                    // JZ      _empty_{n}
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)          // MOVQ    (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ   AX, AX
    self.Sjmp("JNZ"  , "_number_next_{n}")              // JNZ     _number_next_{n}
    self.Emit("MOVQ" , jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP"  , _LB_panic)
    self.Link("_number_next_{n}")
    self.call_go(_F_isValidNumber)                      // CALL_GO isValidNumber
    self.Emit("CMPB" , _AX, jit.Imm(0))                 // CMPB    AX, $0
    self.Sjmp("JE"   , _LB_error_invalid_number)        // JE      _error_invalid_number
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)          // MOVQ    8(SP.p), BX
    self.check_size_r(_BX, 0)                           // SIZE    BX
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)    // LEAQ    (RP)(RL), AX
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)          // ADDQ    8(SP.p), RL
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)          // MOVQ    (SP.p), BX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)          // MOVQ    8(SP.p), CX
    self.call_go(_F_memmove)                            // CALL_GO memmove
    self.Emit("MOVQ" , _ARG_rb, _AX)                    // MOVQ    rb<>+0(FP), AX
    self.Emit("MOVQ" , _RL, jit.Ptr(_AX, 8))            // MOVQ    RL, 8(AX)
    self.Sjmp("JMP"  , "_done_{n}")                     // JMP     _done_{n}
    self.Link("_empty_{n}")                             // _empty_{n}:
    self.check_size(1)                                  // SIZE    $1
    self.add_char('0')                                  // CHAR    $'0'
    self.Link("_done_{n}")                              // _done_{n}:
}
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer_AX()                       // MOVE  {buf}, AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)  // MOVQ  (SP.p), BX
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // LEAQ  8(SP.p), CX
    self.Emit("MOVQ" , _ST, _DI)                // MOVQ  ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)            // MOVQ  fv, SI
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
    self.load_buffer_AX()                       // LOAD  {buf}
}

func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer_AX()                       // MOVE  {buf}, AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)  // MOVQ  (SP.p), CX
    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX)    // MOVQ  8(CX), BX
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // LEAQ  8(SP.p), CX
    self.Emit("MOVQ" , _ST, _DI)                // MOVQ  ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)            // MOVQ  fv, SI
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
    self.load_buffer_AX()                       // LOAD  {buf}
}

func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))    // MOVB $p.i64(), (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}

func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))    // SIZE ${len(p.vs())}
    self.add_text(p.vs())           // TEXT ${p.vs()}
}

func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)     // MOVQ (SP.p), SP.p
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ $p.i64(), AX
    self.Emit("ADDQ", _AX, _SP_p)               // ADDQ AX, SP.p
}

func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)                 // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)     // MOVQ -24(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)      // MOVQ -8(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)       // MOVQ (ST)(AX), SP.q
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)                     // DROP  $(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))   // MOVOU X0, 56(ST)(AX)
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer_AX()                       // MOVE {buf}, AX
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _BX)        // MOVQ $(type(vt)), BX

    /* check for indirection */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _CX)           // MOVQ SP.p, CX
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)       // MOVQ SP.p, VAR.vp
        self.Emit("LEAQ", _VAR_vp, _CX)         // LEAQ VAR.vp, CX
    }

    /* call the encoder */
    self.Emit("MOVQ" , _ST, _DI)                // MOVQ  ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)            // MOVQ  $fv, SI
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI)  // BTCQ $bitPointerValue, SI
    }
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
    self.load_buffer_AX()                       // LOAD  {buf}
}

func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))    // CMPQ 8(SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPB (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPW (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPL (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)          // MOVQ  (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ AX, AX
    self.Xjmp("JZ"   , p.vi())                          // JZ    p.vi()
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))     // CMPQ  (AX), $0
    self.Xjmp("JE"   , p.vi())                          // JE    p.vi()
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)       // MOVQ    $p.vt(), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)      // MOVQ    (SP.p), BX
    self.Emit("MOVQ" , _ARG_fv, _CX)                // MOVQ    fv, CX
    self.call_go(_F_iteratorStart)                  // CALL_GO iteratorStart
    self.Emit("MOVQ" , _AX, _SP_q)                  // MOVQ    AX, SP.q
    self.Emit("MOVQ" , _BX, _ET)                    // MOVQ    BX, ET
    self.Emit("MOVQ" , _CX, _EP)                    // MOVQ    CX, EP
    self.Emit("TESTQ", _ET, _ET)                    // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)                   // JNZ     _error
}

func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, _AX)               // MOVQ    SP.q, AX
    self.call_go(_F_iteratorStop)               // CALL_GO iteratorStop
    self.Emit("XORL", _SP_q, _SP_q)             // XORL    SP.q, SP.q
}

func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)    // MOVQ    (SP.q), SP.p
    self.Emit("TESTQ", _SP_p, _SP_p)                // TESTQ   SP.p, SP.p
    self.Xjmp("JZ"   , p.vi())                      // JZ      p.vi()
}

func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)      // BTQ ${SortMapKeys}, fv
    self.Sjmp("JNC", "_unordered_key_{n}")                  // JNC _unordered_key_{n}
    self.encode_string(false)                               // STR $false
    self.Xjmp("JMP", p.vi())                                // JMP ${p.vi()}
    self.Link("_unordered_key_{n}")                         // _unordered_key_{n}:
}

func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)     // MOVQ    8(SP.q), SP.p
    self.Emit("MOVQ", _SP_q, _AX)                   // MOVQ    SP.q, AX
    self.call_go(_F_iteratorNext)                   // CALL_GO iteratorNext
}

func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)        // MOVQ  8(SP.p), SP.x
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)        // MOVQ  (SP.p), SP.p
    self.Emit("ORQ"  , jit.Imm(1 << _S_init), _SP_f)    // ORQ   $(1<<_S_init), SP.f
}

func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)                          // TESTQ   SP.x, SP.x
    self.Xjmp("JZ"     , p.vi())                                // JZ      p.vi()
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)                     // SUBQ    $1, SP.x
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)               // BTRQ    $_S_init, SP.f
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // LEAQ    $(p.vlen())(SP.p), AX
    self.Emit("CMOVQCC", _AX, _SP_p)                            // CMOVQCC AX, SP.p
}
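
// BTRQ copies the _S_init bit into CF before clearing it, so the CMOVQCC
// only advances SP.p on iterations after the first one: element 0 is read
// at the slice base itself, and every later iteration steps by p.vlen().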

func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_text_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)  // ORQ $(1<<_S_cond), SP.f
}

func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)      // BTRQ $_S_cond, SP.f
    self.Xjmp("JC"  , p.vi())                       // JC   p.vi()
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)  // MOVQ $(p2.op()), CX
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)  // MOVQ $(p1.op()), BX
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)        // MOVQ $(i), AX
    self.call_go(_F_println)
}