// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/cpu`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`

    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/rt`
)

/** Register Allocations
 *
 *  State Registers:
 *
 *      %rbx : stack base
 *      %rdi : result pointer
 *      %rsi : result length
 *      %rdx : result capacity
 *      %r12 : sp->p
 *      %r13 : sp->q
 *      %r14 : sp->x
 *      %r15 : sp->f
 *
 *  Error Registers:
 *
 *      %r10 : error type register
 *      %r11 : error pointer register
 */

/** Function Prototype & Stack Map
 *
 *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
 *
 *  buf    :   (FP)
 *  p      :  8(FP)
 *  sb     : 16(FP)
 *  fv     : 24(FP)
 *  err.vt : 32(FP)
 *  err.vp : 40(FP)
 */

const (
    _S_cond = iota
    _S_init
)

const (
    _FP_args   = 48     // 48 bytes for passing arguments to this function
    _FP_fargs  = 64     // 64 bytes for passing arguments to other Go functions
    _FP_saves  = 64     // 64 bytes for saving the registers before CALL instructions
    _FP_locals = 24     // 24 bytes for local variables
)

const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8     // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8     // 8 bytes for the return address
)
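
// With the values above, the frame laid out by the prologue is: Go call
// arguments at [0, _FP_fargs), register spill slots at [_FP_fargs,
// _FP_fargs+_FP_saves), local variables at [_FP_fargs+_FP_saves, _FP_offs),
// the saved frame pointer at _FP_offs (152), the return address at _FP_size
// (160), and the incoming arguments starting at _FP_base (168), matching the
// stack map documented above.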

const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)

const (
    _IM_null   = 0x6c6c756e           // 'null'
    _IM_true   = 0x65757274           // 'true'
    _IM_fals   = 0x736c6166           // 'fals' ('false' without the 'e')
    _IM_open   = 0x00225c22           // '"\"∅'
    _IM_array  = 0x5d5b               // '[]'
    _IM_object = 0x7d7b               // '{}'
    _IM_mulv   = -0x5555555555555555
)
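
// The _IM_* values are the literal byte sequences packed little-endian, so a
// single MOVL/MOVW writes them directly into the output buffer (0x6c6c756e is
// "null", 0x5d5b is "[]", and so on). _IM_mulv is 0xaaaaaaaaaaaaaaab viewed as
// an unsigned 64-bit value, the magic multiplier that lets _asm_OP_bin estimate
// the base64 output size (roughly 4/3 of the input) without a division.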

const (
    _LB_more_space        = "_more_space"
    _LB_more_space_return = "_more_space_return_"
)

const (
    _LB_error                 = "_error"
    _LB_error_too_deep        = "_error_too_deep"
    _LB_error_invalid_number  = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic                 = "_panic"
)

var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
)

var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

var (
    _ST = jit.Reg("BX")
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

var (
    _LR  = jit.Reg("R9")
    _R10 = jit.Reg("R10")   // used for gcWriterBarrier
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)

var (
    _SP_p = jit.Reg("R12")
    _SP_q = jit.Reg("R13")
    _SP_x = jit.Reg("R14")
    _SP_f = jit.Reg("R15")
)

var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
)

var (
    _RET_et = jit.Ptr(_SP, _FP_base + 32)
    _RET_ep = jit.Ptr(_SP, _FP_base + 40)
)

var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
)

var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
    _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
)
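
// Register sets spilled around calls: _REG_ffi holds the buffer registers
// clobbered by C-ABI helpers (see save_c/call_c), _REG_enc and _REG_jsr keep
// the encoder state registers live across nested encoder calls and the
// _more_space trampoline respectively (the latter saving LR instead of the
// buffer length), and _REG_all is saved around ordinary Go calls (call_go).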

type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    x    int
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

/** Assembler Interface **/
func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}

/** Assembler Stages **/

var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}

func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)
    self.Emit("XORL", _EP, _EP)
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _AX)                 // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))         // MOVQ RL, 8(AX)
291self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+24(FP)
292self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+32(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
    self.Emit("RET")                                // RET
}

func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
    self.load_buffer()                              // LOAD {buf}
    self.Emit("MOVQ", _ARG_vp, _SP_p)               // MOVQ vp<>+8(FP), SP.p
    self.Emit("MOVQ", _ARG_sb, _ST)                 // MOVQ sb<>+16(FP), ST
    self.Emit("XORL", _SP_x, _SP_x)                 // XORL SP.x, SP.x
    self.Emit("XORL", _SP_f, _SP_f)                 // XORL SP.f, SP.f
    self.Emit("XORL", _SP_q, _SP_q)                 // XORL SP.q, SP.q
}

/** Assembler Inline Functions **/

func (self *_Assembler) xsave(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
        }
    }
}

func (self *_Assembler) xload(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
        }
    }
}

func (self *_Assembler) rbuf_di() {
    if _RP.Reg != x86.REG_DI {
        panic("register allocation messed up: RP != DI")
    } else {
        self.Emit("ADDQ", _RL, _RP)
    }
}

func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                           // SAVE   $C_regs
    self.rbuf_di()                          // MOVQ   RP, DI
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)  // $ins   (SP.p), SI
    self.call_c(fn)                         // CALL_C $fn
    self.Emit("ADDQ", _AX, _RL)             // ADDQ   AX, RL
}

func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte stores */
    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)        // MOVQ $s[i:], AX
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))  // MOVQ AX, i(RP)(RL)
        i += 8
    }

    /* 4-byte stores */
    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVL $s[i:], i(RP)(RL)
        i += 4
    }

    /* 2-byte stores */
    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVW $s[i:], i(RP)(RL)
        i += 2
    }

    /* last byte */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVB $s[i:], i(RP)(RL)
    }
}

func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}

func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}

func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the following code relies on LR == R9 to work */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)           // LEAQ $v, AX
    self.Emit("CMPQ", _AX, _RC)         // CMPQ AX, RC
    self.Sjmp("JBE" , key)              // JBE  _more_space_return_{n}
    self.slice_grow_ax(key)             // GROW $key
    self.Link(key)                      // _more_space_return_{n}:
}

func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ ?(PC), R9
    self.Sref(ret, 4)                   // .... &ret
    self.Sjmp("JMP" , _LB_more_space)   // JMP  _more_space
}
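
// slice_grow_ax emits a raw "LEAQ rel32(PC), R9" (bytes 0x4c 0x8d 0x0d plus the
// Sref-patched displacement) so that LR points at the resume label before
// jumping to _more_space; the builtin then comes back with a plain "JMP LR"
// rather than a CALL/RET pair, leaving the current frame untouched while the
// buffer is grown.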

/** State Stack Helpers **/

const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)
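
// The first word of the state stack (pointed to by ST) holds the byte offset of
// the current top; each pushed frame occupies _StateSize bytes laid out as
// {x, f, p, q}, and the depth is capped at _MaxStack frames (_StackLimit
// bytes), checked in save_state below.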

func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8)    // LEAQ _StateSize(CX), R8
    self.Emit("CMPQ", _R8, jit.Imm(_StackLimit))        // CMPQ R8, $_StackLimit
    self.Sjmp("JAE" , _LB_error_too_deep)               // JAE  _error_too_deep
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))   // MOVQ SP.x, 8(ST)(CX)
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))  // MOVQ SP.f, 16(ST)(CX)
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))   // MOVQ SP.p, 24(ST)(CX)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))   // MOVQ SP.q, 32(ST)(CX)
    self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0))             // MOVQ R8, (ST)
}

func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
    self.Emit("SUBQ" , jit.Imm(decr), _AX)              // SUBQ  $decr, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)  // MOVQ  8(ST)(AX), SP.x
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ  16(ST)(AX), SP.f
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ  24(ST)(AX), SP.p
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ  32(ST)(AX), SP.q
    /* zero the popped slot so that its stale pointers do not keep objects alive */
    self.Emit("PXOR" , _X0, _X0)                        // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // MOVOU X0, 8(ST)(AX)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))   // MOVOU X0, 24(ST)(AX)
}

/** Buffer Helpers **/

func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVB $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}

func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVL $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(n), _RL)                              // ADDQ $n, RL
}

func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)                               // TEXT $ss
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)  // ADDQ ${len(ss)}, RL
}

func (self *_Assembler) prep_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))     // MOVQ RL, 8(AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ AX, (SP)
}

func (self *_Assembler) prep_buffer_c() {
    self.Emit("MOVQ", _ARG_rb, _DI)             // MOVQ rb<>+0(FP), DI
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))     // MOVQ RL, 8(DI)
}

func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)             // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))     // MOVQ RP, (CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))     // MOVQ RL, 8(CX)
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))    // MOVQ RC, 16(CX)
}

func (self *_Assembler) load_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)     // MOVQ (AX), RP
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)     // MOVQ 8(AX), RL
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)    // MOVQ 16(AX), RC
}

/** Function Interface Helpers **/

func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _AX)      // MOVQ $pc, AX
    self.Rjmp("CALL", _AX)          // CALL AX
}

func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)         // SAVE $REG_ffi
}

func (self *_Assembler) call_c(pc obj.Addr) {
    self.call(pc)                   // CALL $pc
    self.xload(_REG_ffi...)         // LOAD $REG_ffi
}

func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)         // SAVE $REG_all
    self.call(pc)                   // CALL $pc
    self.xload(_REG_all...)         // LOAD $REG_all
}

func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)         // SAVE $REG_enc
    self.call(pc)                   // CALL $pc
    self.xload(_REG_enc...)         // LOAD $REG_enc
    self.load_buffer()              // LOAD {buf}
}

func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
    case reflect.Interface        : self.call_marshaler_i(fn, it)
    case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
    // a struct or array wrapping a single direct interface value is itself stored directly
    default                       : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}

func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Gtype(it), _AX)                          // MOVQ $it, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                        // MOVQ AX, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)                      // MOVQ (SP.p), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)                      // MOVQ 8(SP.p), CX
    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ AX, AX
    self.Sjmp("JZ"   , "_null_{n}")                                 // JZ _null_{n}
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))                        // MOVQ AX, 8(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))                       // MOVQ CX, 16(SP)
    self.call_go(_F_assertI2I)                                      // CALL_GO assertI2I
    self.prep_buffer()                                              // MOVE {buf}, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0)                       // MOVOU 24(SP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                        // MOVOU X0, 8(SP)
    self.Emit("MOVQ", _ARG_fv, _CX)                                 // MOVQ ARG.fv, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))                        // MOVQ CX, 24(SP)
    self.call_encoder(fn)                                           // CALL $fn
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)                       // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)                       // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                                    // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)                                   // JNZ _error
    self.Sjmp("JMP"  , "_done_{n}")                                 // JMP _done_{n}
    self.Link("_null_{n}")                                          // _null_{n}:
    self.check_size(4)                                              // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ $4, RL
    self.Link("_done_{n}")                                          // _done_{n}:
}

func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Itab(it, vt), _AX)    // MOVQ $(itab(it, vt)), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)

    /* dereference the pointer if needed */
    if !deref {
        self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16))  // MOVQ SP.p, 16(SP)
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)   // MOVQ (SP.p), AX
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ AX, 16(SP)
    }

    /* call the encoder, and perform error checks */
    self.Emit("MOVQ", _ARG_fv, _CX)             // MOVQ ARG.fv, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))    // MOVQ CX, 24(SP)
    self.call_encoder(fn)                       // CALL $fn
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
}

/** Builtin: _more_space **/

var (
    _T_byte      = jit.Type(byteType)
    _F_growslice = jit.Func(growslice)
)

func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8))     // MOVQ RP, 8(SP)
    self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16))    // MOVQ RL, 16(SP)
    self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24))    // MOVQ RC, 24(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))    // MOVQ AX, 32(SP) (AX holds the required capacity, spill it before reuse)
    self.Emit("MOVQ", _T_byte, _AX)             // MOVQ $_T_byte, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ AX, (SP)
    self.xsave(_REG_jsr...)                     // SAVE $REG_jsr
    self.call(_F_growslice)                     // CALL $pc
    self.xload(_REG_jsr...)                     // LOAD $REG_jsr
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP)    // MOVQ 40(SP), RP
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL)    // MOVQ 48(SP), RL
    self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC)    // MOVQ 56(SP), RC
    self.save_buffer()                          // SAVE {buf}
    self.Rjmp("JMP" , _LR)                      // JMP  LR
}
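
// The argument block built above follows the go1.16 runtime.growslice signature
// (element type, old slice header, required capacity), and the returned slice
// header is read back from 40(SP), 48(SP) and 56(SP) before the buffer triple
// is written back through save_buffer.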

/** Builtin Errors **/

var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)

func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)                 // MOVQ $_V_ERR_too_deep, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP _error
}

func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.call_go(_F_error_number)               // CALL_GO error_number
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET)    // MOVQ 16(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP)    // MOVQ 24(SP), EP
    self.Sjmp("JMP" , _LB_error)                // JMP _error
}

func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)          // MOVQ $_V_ERR_nan_or_infinite, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP _error
}

/** String Encoding Routine **/

var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
    self.call_go(_F_panic)
}

func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)  // MOVQ 8(SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_str_empty_{n}")        // JZ _str_empty_{n}
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE" , "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_str_next_{n}")

    /* opening quote, check for double quote */
    if !doubleQuote {
        self.check_size_r(_AX, 2)   // SIZE $2
        self.add_char('"')          // CHAR $'"'
    } else {
        self.check_size_r(_AX, 6)   // SIZE $6
        self.add_long(_IM_open, 3)  // TEXT $`"\"`
    }

    /* quoting loop */
    self.Emit("XORL", _AX, _AX)     // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
    self.Link("_str_loop_{n}")      // _str_loop_{n}:
    self.save_c()                   // SAVE $REG_ffi

    /* load the output buffer first, and then input buffer,
     * because the parameter registers collide with RP / RL / RC */
    self.Emit("MOVQ", _RC, _CX)                     // MOVQ RC, CX
    self.Emit("SUBQ", _RL, _CX)                     // SUBQ RL, CX
    self.Emit("MOVQ", _CX, _VAR_dn)                 // MOVQ CX, dn
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
    self.Emit("LEAQ", _VAR_dn, _CX)                 // LEAQ dn, CX
    self.Emit("MOVQ", _VAR_sp, _AX)                 // MOVQ sp, AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)       // MOVQ (SP.p), DI
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)       // MOVQ 8(SP.p), SI
    self.Emit("ADDQ", _AX, _DI)                     // ADDQ AX, DI
    self.Emit("SUBQ", _AX, _SI)                     // SUBQ AX, SI

    /* set the flags based on `doubleQuote` */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)                             // XORL R8, R8
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    }

    /* call the native quoter */
    self.call_c(_F_quote)                   // CALL  quote
    self.Emit("ADDQ" , _VAR_dn, _RL)        // ADDQ  dn, RL
    self.Emit("TESTQ", _AX, _AX)            // TESTQ AX, AX
    self.Sjmp("JS"   , "_str_space_{n}")    // JS    _str_space_{n}

    /* close the string, check for double quote */
    if !doubleQuote {
        self.check_size(1)                  // SIZE $1
        self.add_char('"')                  // CHAR $'"'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP _str_end_{n}
    } else {
        self.check_size(3)                  // SIZE $3
        self.add_text("\\\"\"")             // TEXT $'\""'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP _str_end_{n}
    }

    /* not enough space to contain the quoted string: the native quoter signals
     * this with a negative result whose bitwise complement is the number of
     * source bytes already consumed, so record that offset, double the buffer
     * and resume the loop */
    self.Link("_str_space_{n}")                     // _str_space_{n}:
    self.Emit("NOTQ", _AX)                          // NOTQ AX
    self.Emit("ADDQ", _AX, _VAR_sp)                 // ADDQ AX, sp
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
    self.slice_grow_ax("_str_loop_{n}")             // GROW _str_loop_{n}

    /* empty string, check for double quote */
    if !doubleQuote {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(2)              // SIZE $2
        self.add_text("\"\"")           // TEXT $'""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    } else {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(6)              // SIZE $6
        self.add_text("\"\\\"\\\"\"")   // TEXT $'"\"\""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    }
}

/** OpCode Assembler Functions **/

var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

const (
    _MODE_AVX2 = 1 << 2
)

func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}

func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ $4, RL
}

func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))                // CMPB (SP.p), $0
    self.Sjmp("JE"  , "_false_{n}")                                 // JE  _false_{n}
    self.check_size(4)                                              // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'true', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ $4, RL
    self.Sjmp("JMP" , "_end_{n}")                                   // JMP  _end_{n}
    self.Link("_false_{n}")                                         // _false_{n}:
    self.check_size(5)                                              // SIZE $5
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'fals', (RP)(RL*1)
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))        // MOVB $'e', 4(RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(5), _RL)                              // ADDQ $5, RL
    self.Link("_end_{n}")                                           // _end_{n}:
}

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX)      // MOVL (SP.p), AX
    self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX)     // ANDL $_FM_exp32, AX
    self.Emit("XORL" , jit.Imm(_FM_exp32), _AX)     // XORL $_FM_exp32, AX
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)   // JZ _error_nan_or_infinite
    self.save_c()                                   // SAVE $C_regs
    self.rbuf_di()                                  // MOVQ RP, DI
    self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0)     // MOVSS (SP.p), X0
    self.call_c(_F_f32toa)                          // CALL_C f32toa
    self.Emit("ADDQ" , _AX, _RL)                    // ADDQ AX, RL
}

func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)      // MOVQ (SP.p), AX
    self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX)     // MOVQ $_FM_exp64, CX
    self.Emit("ANDQ" , _CX, _AX)                    // ANDQ CX, AX
    self.Emit("XORQ" , _CX, _AX)                    // XORQ CX, AX
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)   // JZ _error_nan_or_infinite
    self.save_c()                                   // SAVE $C_regs
    self.rbuf_di()                                  // MOVQ RP, DI
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)     // MOVSD (SP.p), X0
    self.call_c(_F_f64toa)                          // CALL_C f64toa
    self.Emit("ADDQ" , _AX, _RL)                    // ADDQ AX, RL
}

func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    /* estimate the encoded size: with M = _IM_mulv (the unsigned reciprocal of 3),
     * DX = high64((len + 2) * M) ~= 2 * (len + 2) / 3, and 2 * DX + 1, rounded up
     * by the ORQ $2, is always at least 4 * ceil(len / 3) + 2, i.e. the base64
     * output plus the two enclosing quotes; RC (DX) is preserved in R8 around the
     * MULQ */
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ 8(SP.p), AX
    self.Emit("ADDQ", jit.Imm(2), _AX)              // ADDQ $2, AX
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)       // MOVQ $_IM_mulv, CX
    self.Emit("MOVQ", _DX, _R8)                     // MOVQ DX, R8
    self.From("MULQ", _CX)                          // MULQ CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
    self.Emit("ORQ" , jit.Imm(2), _AX)              // ORQ $2, AX
    self.Emit("MOVQ", _R8, _DX)                     // MOVQ R8, DX
    self.check_size_r(_AX, 0)                       // SIZE AX
    self.add_char('"')                              // CHAR $'"'
    self.save_c()                                   // SAVE $REG_ffi
    self.prep_buffer_c()                            // MOVE {buf}, DI
    self.Emit("MOVQ", _SP_p, _SI)                   // MOVQ SP.p, SI

    /* check for AVX2 support */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)                 // XORL DX, DX
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
    }

    /* call the native b64encode: output slice header in DI,
     * source slice header in SI, mode flags in DX */
    self.call_c(_F_b64encode)                       // CALL b64encode
    self.load_buffer()                              // LOAD {buf}
    self.add_char('"')                              // CHAR $'"'
}

func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}

func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)      // MOVQ 8(SP.p), CX
    self.Emit("TESTQ", _CX, _CX)                    // TESTQ CX, CX
    self.Sjmp("JZ"   , "_empty_{n}")                // JZ _empty_{n}
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)      // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                    // TESTQ AX, AX
    self.Sjmp("JNZ"  , "_number_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))        // MOVQ CX, 8(SP)
    self.call_go(_F_isValidNumber)                  // CALL_GO isValidNumber
    self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0))// CMPB 16(SP), $0
    self.Sjmp("JE"   , _LB_error_invalid_number)    // JE _error_invalid_number
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)      // MOVQ 8(SP.p), AX
    self.check_size_r(_AX, 0)                       // SIZE AX
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)// LEAQ (RP)(RL), AX
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)      // ADDQ 8(SP.p), RL
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // MOVQ AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0)      // MOVOU (SP.p), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))        // MOVOU X0, 8(SP)
    self.call_go(_F_memmove)                        // CALL_GO memmove
    self.Sjmp("JMP"  , "_done_{n}")                 // JMP _done_{n}
    self.Link("_empty_{n}")                         // _empty_{n}:
    self.check_size(1)                              // SIZE $1
    self.add_char('0')                              // CHAR $'0'
    self.Link("_done_{n}")                          // _done_{n}:
}

func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // LEAQ 8(SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ 40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
}

func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX)    // MOVQ 8(AX), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // LEAQ 8(SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ 40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
}

func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))    // MOVB $p.i64(), (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}

func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))    // SIZE ${len(p.vs())}
    self.add_text(p.vs())           // TEXT ${p.vs()}
}

func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)     // MOVQ (SP.p), SP.p
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ $p.vi(), AX
    self.Emit("ADDQ", _AX, _SP_p)               // ADDQ AX, SP.p
}

func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)  // MOVQ -8(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)   // MOVQ (ST)(AX), SP.q
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)                     // DROP  $(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))   // MOVOU X0, 56(ST)(AX)
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _AX)        // MOVQ $(type(p.vt())), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)

    /* check for indirection */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _AX)           // MOVQ SP.p, AX
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)       // MOVQ SP.p, VAR.vp
        self.Emit("LEAQ", _VAR_vp, _AX)         // LEAQ VAR.vp, AX
    }

    /* call the encoder */
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ fv, AX
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX)    // BTCQ $bitPointerValue, AX
    }
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ 40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
}

func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))    // CMPQ 8(SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPB (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPW (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPL (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)          // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ AX, AX
    self.Xjmp("JZ"   , p.vi())                          // JZ p.vi()
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))     // CMPQ (AX), $0
    self.Xjmp("JE"   , p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)       // MOVQ $p.vt(), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)      // MOVQ (SP.p), CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))        // MOVQ CX, 8(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)                // MOVQ fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))       // MOVQ AX, 16(SP)
    self.call_go(_F_iteratorStart)                  // CALL_GO iteratorStart
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q)     // MOVQ 24(SP), SP.q
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)       // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)       // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                    // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)                   // JNZ _error
}

func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))   // MOVQ SP.q, 0(SP)
    self.call_go(_F_iteratorStop)               // CALL_GO iteratorStop
    self.Emit("XORL", _SP_q, _SP_q)             // XORL SP.q, SP.q
}

func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)    // MOVQ (SP.q), SP.p
    self.Emit("TESTQ", _SP_p, _SP_p)                // TESTQ SP.p, SP.p
    self.Xjmp("JZ"   , p.vi())                      // JZ p.vi()
}

func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)  // BTQ ${SortMapKeys}, fv
    self.Sjmp("JNC", "_unordered_key_{n}")              // JNC _unordered_key_{n}
    self.encode_string(false)                           // STR $false
    self.Xjmp("JMP", p.vi())                            // JMP ${p.vi()}
    self.Link("_unordered_key_{n}")                     // _unordered_key_{n}:
}

func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)     // MOVQ 8(SP.q), SP.p
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))       // MOVQ SP.q, (SP)
    self.call_go(_F_iteratorNext)                   // CALL_GO iteratorNext
}

func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)        // MOVQ 8(SP.p), SP.x
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)        // MOVQ (SP.p), SP.p
    self.Emit("ORQ"  , jit.Imm(1 << _S_init), _SP_f)    // ORQ $(1<<_S_init), SP.f
}

func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)                          // TESTQ SP.x, SP.x
    self.Xjmp("JZ"     , p.vi())                                // JZ p.vi()
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)                     // SUBQ $1, SP.x
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)               // BTRQ $_S_init, SP.f
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // LEAQ $(p.vlen())(SP.p), AX
    self.Emit("CMOVQCC", _AX, _SP_p)                            // CMOVQNC AX, SP.p
}
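
// In _asm_OP_slice_next, BTRQ copies the _S_init bit into CF and clears it, so
// the CMOVQCC above only advances SP.p when the flag was not set: the first
// element keeps the original pointer and every later iteration steps it by the
// element size p.vlen().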

func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_text_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)  // ORQ $(1<<_S_cond), SP.f
}

func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)      // BTRQ $_S_cond, SP.f
    self.Xjmp("JC"  , p.vi())
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))    // MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))     // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))           // MOVQ $(i), (SP)
    self.call_go(_F_println)
}