// NOTE(review): extraction residue header ("podman", "1175 строк · 45.3 Кб" =
// "1175 lines · 45.3 KB") — not part of the original source file.
1//go:build go1.17 && !go1.22
2// +build go1.17,!go1.22
3
4/*
5* Copyright 2021 ByteDance Inc.
6*
7* Licensed under the Apache License, Version 2.0 (the "License");
8* you may not use this file except in compliance with the License.
9* You may obtain a copy of the License at
10*
11* http://www.apache.org/licenses/LICENSE-2.0
12*
13* Unless required by applicable law or agreed to in writing, software
14* distributed under the License is distributed on an "AS IS" BASIS,
15* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16* See the License for the specific language governing permissions and
17* limitations under the License.
18*/
19
20package encoder21
22import (23`fmt`24`reflect`25`strconv`26`unsafe`27
28`github.com/bytedance/sonic/internal/cpu`29`github.com/bytedance/sonic/internal/jit`30`github.com/bytedance/sonic/internal/native/types`31`github.com/twitchyliquid64/golang-asm/obj`32`github.com/twitchyliquid64/golang-asm/obj/x86`33
34`github.com/bytedance/sonic/internal/native`35`github.com/bytedance/sonic/internal/rt`36)
37
38/** Register Allocations
39*
40* State Registers:
41*
42* %rbx : stack base
43* %rdi : result pointer
44* %rsi : result length
45* %rdx : result capacity
46* %r12 : sp->p
47* %r13 : sp->q
48* %r14 : sp->x
49* %r15 : sp->f
50*
51* Error Registers:
52*
53* %r10 : error type register
54* %r11 : error pointer register
55*/
56
57/** Function Prototype & Stack Map
58*
59* func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
60*
61* buf : (FP)
62* p : 8(FP)
63* sb : 16(FP)
64* fv : 24(FP)
65* err.vt : 32(FP)
66* err.vp : 40(FP)
67*/
68
/* Condition-bit slots used by _OP_cond_set / _OP_cond_testc. */
const (
    _S_cond = iota
    _S_init
)

/* Stack-frame layout constants. */
const (
    _FP_args   = 32 // 32 bytes for spill registers of arguments
    _FP_fargs  = 40 // 40 bytes for passing arguments to other Go functions
    _FP_saves  = 64 // 64 bytes for saving the registers before CALL instructions
    _FP_locals = 24 // 24 bytes for local variables
)

const (
    _FP_loffs = _FP_fargs + _FP_saves
    _FP_offs  = _FP_loffs + _FP_locals
    // _FP_offs = _FP_loffs + _FP_locals + _FP_debug
    _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8 // 8 bytes for the return address
)

/* Exponent masks used to detect NaN / ±Inf in float32 / float64. */
const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)

/* Little-endian immediates for common JSON literals. */
const (
    _IM_null   = 0x6c6c756e // 'null'
    _IM_true   = 0x65757274 // 'true'
    _IM_fals   = 0x736c6166 // 'fals' ('false' without the 'e')
    _IM_open   = 0x00225c22 // '"\"∅'
    _IM_array  = 0x5d5b     // '[]'
    _IM_object = 0x7d7b     // '{}'
    _IM_mulv   = -0x5555555555555555
)

/* Branch-target label names used by the generated code. */
const (
    _LB_more_space        = "_more_space"
    _LB_more_space_return = "_more_space_return_"
)

const (
    _LB_error                 = "_error"
    _LB_error_too_deep        = "_error_too_deep"
    _LB_error_invalid_number  = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic                 = "_panic"
)
116
/* General-purpose register operands. */
var (
    _AX = jit.Reg("AX")
    _BX = jit.Reg("BX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
)

/* Vector registers. */
var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

/* Output-buffer registers: state stack base and result ptr/len/cap. */
var (
    _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go...
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

/* Link register and error type/pointer registers. */
var (
    _LR = jit.Reg("R9")
    _ET = jit.Reg("AX")
    _EP = jit.Reg("BX")
)

/* Traversal-state registers (see the register allocation table above). */
var (
    _SP_p = jit.Reg("R10") // saved on BX when call_c
    _SP_q = jit.Reg("R11") // saved on BP when call_c
    _SP_x = jit.Reg("R12")
    _SP_f = jit.Reg("R13")
)

/* Spilled argument slots (see the stack map above). */
var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
)

/* Return-value registers (error interface). */
var (
    _RET_et = _ET
    _RET_ep = _EP
)

/* Local variable slots. */
var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
)

/* Register sets saved/restored around the different kinds of calls. */
var (
    _REG_ffi = []obj.Addr{ _RP, _RL, _RC, _SP_q}
    _REG_b64 = []obj.Addr{_SP_p, _SP_q}

    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
    _REG_ms  = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
)
181
// _Assembler translates a compiled encoder _Program into machine code.
type _Assembler struct {
    jit.BaseAssembler
    p    _Program // instruction sequence to assemble
    x    int      // counter used to generate unique buffer-grow return labels
    name string   // symbol suffix for the generated function
}
188
189func newAssembler(p _Program) *_Assembler {190return new(_Assembler).Init(p)191}
192
193/** Assembler Interface **/
194
// Load assembles the program and returns the resulting encoder function.
func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
198
// Init stores the program and registers compile as the assembly callback.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}
204
// compile emits the full function: prologue, instruction bodies, epilogue,
// then the shared builtin routines (buffer growth, error exits, panic).
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}
211
212/** Assembler Stages **/
213
// _OpFuncTab maps each opcode to its assembly routine; nil entries are
// invalid opcodes (instr panics on them).
var _OpFuncTab = [256]func(*_Assembler, *_Instr){
    _OP_null:           (*_Assembler)._asm_OP_null,
    _OP_empty_arr:      (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj:      (*_Assembler)._asm_OP_empty_obj,
    _OP_bool:           (*_Assembler)._asm_OP_bool,
    _OP_i8:             (*_Assembler)._asm_OP_i8,
    _OP_i16:            (*_Assembler)._asm_OP_i16,
    _OP_i32:            (*_Assembler)._asm_OP_i32,
    _OP_i64:            (*_Assembler)._asm_OP_i64,
    _OP_u8:             (*_Assembler)._asm_OP_u8,
    _OP_u16:            (*_Assembler)._asm_OP_u16,
    _OP_u32:            (*_Assembler)._asm_OP_u32,
    _OP_u64:            (*_Assembler)._asm_OP_u64,
    _OP_f32:            (*_Assembler)._asm_OP_f32,
    _OP_f64:            (*_Assembler)._asm_OP_f64,
    _OP_str:            (*_Assembler)._asm_OP_str,
    _OP_bin:            (*_Assembler)._asm_OP_bin,
    _OP_quote:          (*_Assembler)._asm_OP_quote,
    _OP_number:         (*_Assembler)._asm_OP_number,
    _OP_eface:          (*_Assembler)._asm_OP_eface,
    _OP_iface:          (*_Assembler)._asm_OP_iface,
    _OP_byte:           (*_Assembler)._asm_OP_byte,
    _OP_text:           (*_Assembler)._asm_OP_text,
    _OP_deref:          (*_Assembler)._asm_OP_deref,
    _OP_index:          (*_Assembler)._asm_OP_index,
    _OP_load:           (*_Assembler)._asm_OP_load,
    _OP_save:           (*_Assembler)._asm_OP_save,
    _OP_drop:           (*_Assembler)._asm_OP_drop,
    _OP_drop_2:         (*_Assembler)._asm_OP_drop_2,
    _OP_recurse:        (*_Assembler)._asm_OP_recurse,
    _OP_is_nil:         (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1:      (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1:      (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2:      (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4:      (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8:      (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map:    (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto:           (*_Assembler)._asm_OP_goto,
    _OP_map_iter:       (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop:       (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key:  (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key:  (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next: (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len:      (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next:     (*_Assembler)._asm_OP_slice_next,
    _OP_marshal:        (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p:      (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text:   (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p: (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set:       (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc:     (*_Assembler)._asm_OP_cond_testc,
}
266
267func (self *_Assembler) instr(v *_Instr) {268if fn := _OpFuncTab[v.op()]; fn != nil {269fn(self, v)270} else {271panic(fmt.Sprintf("invalid opcode: %d", v.op()))272}273}
274
// instrs emits every instruction of the program, marking each PC so jumps
// can link to it, and emitting optional per-instruction debug hooks.
func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}
282
// builtins emits the shared out-of-line routines referenced by the
// generated code (buffer growth, error exits, panic trampoline).
func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}
290
// epilogue emits the function exit: success path clears the error
// registers, then (shared with the _error path) writes back the buffer
// length, clears the spilled pointer args, and restores the frame.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)
    self.Emit("XORL", _EP, _EP)
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _CX)                // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))        // MOVQ RL, 8(CX)
    self.Emit("MOVQ", jit.Imm(0), _ARG_rb)         // MOVQ $0, rb<>+0(FP)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)         // MOVQ $0, vp<>+8(FP)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sb)         // MOVQ $0, sb<>+16(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
    self.Emit("RET")                               // RET
}
305
// prologue emits the function entry: allocates the frame, spills the
// register arguments, loads the buffer ptr/len/cap into RP/RL/RC, and
// zero-initializes the traversal-state registers.
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
    self.Emit("MOVQ", _AX, _ARG_rb)                // MOVQ AX, rb<>+0(FP)
    self.Emit("MOVQ", _BX, _ARG_vp)                // MOVQ BX, vp<>+8(FP)
    self.Emit("MOVQ", _CX, _ARG_sb)                // MOVQ CX, sb<>+16(FP)
    self.Emit("MOVQ", _DI, _ARG_fv)                // MOVQ DI, fv<>+24(FP)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)        // MOVQ (AX), DI
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)        // MOVQ 8(AX), SI
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)       // MOVQ 16(AX), DX
    self.Emit("MOVQ", _BX, _SP_p)                  // MOVQ BX, R10
    self.Emit("MOVQ", _CX, _ST)                    // MOVQ CX, R15
    self.Emit("XORL", _SP_x, _SP_x)                // XORL R12, R12
    self.Emit("XORL", _SP_f, _SP_f)                // XORL R13, R13
    self.Emit("XORL", _SP_q, _SP_q)                // XORL R11, R11
}
323
324/** Assembler Inline Functions **/
325
326func (self *_Assembler) xsave(reg ...obj.Addr) {327for i, v := range reg {328if i > _FP_saves / 8 - 1 {329panic("too many registers to save")330} else {331self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))332}333}334}
335
336func (self *_Assembler) xload(reg ...obj.Addr) {337for i, v := range reg {338if i > _FP_saves / 8 - 1 {339panic("too many registers to load")340} else {341self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)342}343}344}
345
// rbuf_di points DI at the current write position (RP + RL); it relies on
// RP being allocated to DI and panics if that invariant is violated.
func (self *_Assembler) rbuf_di() {
    if _RP.Reg != x86.REG_DI {
        panic("register allocation messed up: RP != DI")
    } else {
        self.Emit("ADDQ", _RL, _RP)
    }
}
353
// store_int emits an integer-to-ASCII conversion: reserve nd bytes, load
// the value from (SP.p) with the given mov instruction, call the native
// formatter fn, and advance RL by the number of bytes written.
func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                          // SAVE $C_regs
    self.rbuf_di()                         // MOVQ RP, DI
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI
    self.call_c(fn)                        // CALL_C $fn
    self.Emit("ADDQ", _AX, _RL)            // ADDQ AX, RL
}
362
// store_str emits code that copies the literal s into the buffer at
// (RP)(RL), using the widest stores possible (8/4/2/1 bytes); RL is NOT
// advanced here (callers such as add_text do that).
func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte stores */
    for i <= len(m)-8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)      // MOVQ $s[i:], AX
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
        i += 8
    }

    /* 4-byte stores */
    if i <= len(m)-4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
        i += 4
    }

    /* 2-byte stores */
    if i <= len(m)-2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
        i += 2
    }

    /* last byte */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
    }
}
391
// check_size ensures the buffer has room for n more bytes past RL.
func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}
395
// check_size_r ensures the buffer has room for (register r + d) more
// bytes past RL.
func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}
399
// check_size_rl compares the required end position v against the capacity
// RC, and jumps into the grow routine (which returns to a unique label)
// when the buffer is too small.
func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the following code relies on LR == R9 to work */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)   // LEAQ $v, AX
    self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
    self.Sjmp("JBE" , key)      // JBE _more_space_return_{n}
    self.slice_grow_ax(key)     // GROW $key
    self.Link(key)              // _more_space_return_{n}:
}
417
// slice_grow_ax loads the return label's address into R9 (the link
// register) and jumps to the shared _more_space routine; AX must already
// hold the required size.
func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)          // LEAQ ?(PC), R9
    self.Sref(ret, 4)                    // .... &ret
    self.Sjmp("JMP" , _LB_more_space)    // JMP _more_space
}
423
424/** State Stack Helpers **/
425
/* Size of one saved traversal state and the total state-stack byte limit. */
const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)
430
// save_state pushes the current traversal state (SP.x, SP.f, SP.p, SP.q)
// onto the state stack pointed to by ST, jumping to the too-deep error
// when the new top would reach the stack limit.
func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)            // MOVQ (ST), CX
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9)   // LEAQ _StateSize(CX), R9
    self.Emit("CMPQ", _R9, jit.Imm(_StackLimit))       // CMPQ R9, $_StackLimit
    self.Sjmp("JAE" , _LB_error_too_deep)              // JAE _error_too_deep
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))  // MOVQ SP.x, 8(ST)(CX)
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))  // MOVQ SP.p, 24(ST)(CX)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))  // MOVQ SP.q, 32(ST)(CX)
    self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0))            // MOVQ R9, (ST)
}
442
// drop_state pops decr bytes of saved state from the state stack,
// restores the traversal registers from the new top, and zeroes the
// vacated slots so the GC does not see stale pointers.
func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)              // MOVQ (ST), AX
    self.Emit("SUBQ" , jit.Imm(decr), _AX)                // SUBQ $decr, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))              // MOVQ AX, (ST)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)    // MOVQ 8(ST)(AX), SP.x
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)   // MOVQ 16(ST)(AX), SP.f
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)   // MOVQ 24(ST)(AX), SP.p
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)   // MOVQ 32(ST)(AX), SP.q
    self.Emit("PXOR" , _X0, _X0)                          // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))      // MOVOU X0, 8(ST)(AX)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))     // MOVOU X0, 24(ST)(AX)
}
455
456/** Buffer Helpers **/
457
// add_char appends one byte to the buffer and advances RL; the caller
// must have reserved space with check_size first.
func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(1), _RL)                             // ADDQ $1, RL
}
462
// add_long stores a 4-byte immediate at the write position but advances
// RL by only n bytes (used for literals shorter than the store width).
func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(n), _RL)                             // ADDQ $n, RL
}
467
// add_text appends the literal ss to the buffer and advances RL by its
// length; the caller must have reserved space first.
func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)                               // TEXT $ss
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)  // ADDQ ${len(ss)}, RL
}
472
// prep_buffer_AX loads *buf into AX and syncs the in-register length RL
// back into the slice header before handing the buffer to a callee.
func (self *_Assembler) prep_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)          // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))  // MOVQ RL, 8(AX)
}
478
// save_buffer writes RP/RL/RC back into the caller's slice header.
func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)          // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))  // MOVQ RP, (CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))  // MOVQ RL, 8(CX)
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
}
485
// load_buffer_AX reloads RP/RL/RC from the slice header after a callee
// may have reallocated or appended to the buffer.
func (self *_Assembler) load_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)          // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)  // MOVQ (AX), RP
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)  // MOVQ 8(AX), RL
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
}
493
494/** Function Interface Helpers **/
495
// call emits an indirect call to pc through the link register.
func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _LR)  // MOVQ $pc, R9
    self.Rjmp("CALL", _LR)      // CALL R9
}
500
// save_c spills the registers clobbered by native (C ABI) calls.
func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...) // SAVE $REG_ffi
}
504
// call_b64 calls the base64 encoder, preserving SP.p / SP.q around it.
func (self *_Assembler) call_b64(pc obj.Addr) {
    self.xsave(_REG_b64...) // SAVE $REG_b64
    self.call(pc)           // CALL $pc
    self.xload(_REG_b64...) // LOAD $REG_b64
}
510
// call_c calls a native (C ABI) routine: SP.p is parked in BX across the
// call (it lives in R10, which the C ABI may clobber), and the FFI
// register set is reloaded afterwards. Callers must save_c beforehand.
func (self *_Assembler) call_c(pc obj.Addr) {
    self.Emit("XCHGQ", _SP_p, _BX)
    self.call(pc)               // CALL $pc
    self.xload(_REG_ffi...)     // LOAD $REG_ffi
    self.Emit("XCHGQ", _SP_p, _BX)
}
517
// call_go calls a Go function, preserving the full register state.
func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...) // SAVE $REG_all
    self.call(pc)           // CALL $pc
    self.xload(_REG_all...) // LOAD $REG_all
}
523
// call_more_space calls the growslice helper, preserving the state
// registers plus the link register LR.
func (self *_Assembler) call_more_space(pc obj.Addr) {
    self.xsave(_REG_ms...) // SAVE $REG_ms
    self.call(pc)          // CALL $pc
    self.xload(_REG_ms...) // LOAD $REG_ms
}
529
// call_encoder calls a nested encoder, preserving the state registers
// plus the buffer length RL.
func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...) // SAVE $REG_enc
    self.call(pc)           // CALL $pc
    self.xload(_REG_enc...) // LOAD $REG_enc
}
535
// call_marshaler dispatches a Marshaler call based on the value's kind:
// interfaces go through the dynamic-assert path; pointers/maps (and any
// direct-iface type) can be passed without an extra dereference.
func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
    case reflect.Interface        : self.call_marshaler_i(fn, it)
    case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
    // struct/array of 1 direct iface type can be direct
    default                       : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}
544
// call_marshaler_i invokes a marshaler through an interface value: a nil
// interface (or a failed assertI2I) encodes as 'null'; otherwise the
// asserted itab/value pair is passed to fn along with the buffer.
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_null_{n}")             // JZ _null_{n}
    self.Emit("MOVQ" , _AX, _BX)                // MOVQ AX, BX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)  // MOVQ 8(SP.p), CX
    self.Emit("MOVQ" , jit.Gtype(it), _AX)      // MOVQ $it, AX
    self.call_go(_F_assertI2I)                  // CALL_GO assertI2I
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_null_{n}")             // JZ _null_{n}
    self.Emit("MOVQ", _BX, _CX)                 // MOVQ BX, CX
    self.Emit("MOVQ", _AX, _BX)                 // MOVQ AX, BX
    self.prep_buffer_AX()
    self.Emit("MOVQ", _ARG_fv, _DI)             // MOVQ ARG.fv, DI
    self.call_go(fn)                            // CALL $fn
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ" , _LB_error)                // JNZ _error
    self.load_buffer_AX()
    self.Sjmp("JMP" , "_done_{n}")              // JMP _done_{n}
    self.Link("_null_{n}")                      // _null_{n}:
    self.check_size(4)                          // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)          // ADDQ $4, RL
    self.Link("_done_{n}")                      // _done_{n}:
}
570
// call_marshaler_v invokes a marshaler on a concrete value: an itab for
// (it, vt) is built, the value pointer is optionally dereferenced, and
// the encoder fn is called with error checking.
func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer_AX()                     // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Itab(it, vt), _BX)  // MOVQ $(itab(it, vt)), BX

    /* dereference the pointer if needed */
    if !deref {
        self.Emit("MOVQ", _SP_p, _CX)              // MOVQ SP.p, CX
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)  // MOVQ 0(SP.p), CX
    }

    /* call the encoder, and perform error checks */
    self.Emit("MOVQ", _ARG_fv, _DI)  // MOVQ ARG.fv, DI
    self.call_go(fn)                 // CALL $fn
    self.Emit("TESTQ", _ET, _ET)     // TESTQ ET, ET
    self.Sjmp("JNZ" , _LB_error)     // JNZ _error
    self.load_buffer_AX()
}
589
590/** Builtin: _more_space **/
591
/* Type and function references used by the buffer-growth routine. */
var (
    _T_byte      = jit.Type(byteType)
    _F_growslice = jit.Func(growslice)
)
596
// more_space is the shared buffer-growth routine. On entry AX holds the
// required length; the buffer is shuffled into growslice's argument
// registers, grown, written back, and control returns through LR.
func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, _BX)        // MOVQ DI, BX
    self.Emit("MOVQ", _RL, _CX)        // MOVQ SI, CX
    self.Emit("MOVQ", _RC, _DI)        // MOVQ DX, DI
    self.Emit("MOVQ", _AX, _SI)        // MOVQ AX, SI
    self.Emit("MOVQ", _T_byte, _AX)    // MOVQ $_T_byte, AX
    self.call_more_space(_F_growslice) // CALL $pc
    self.Emit("MOVQ", _AX, _RP)        // MOVQ AX, DI
    self.Emit("MOVQ", _BX, _RL)        // MOVQ BX, SI
    self.Emit("MOVQ", _CX, _RC)        // MOVQ CX, DX
    self.save_buffer()                 // SAVE {buf}
    self.Rjmp("JMP" , _LR)             // JMP LR
}
612
613/** Builtin Errors **/
614
/* Immediate error values and the UnsupportedValueError itab. */
var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)
620
// error_too_deep loads the "too deep" error into ET/EP and exits.
func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)               // MOVQ $_V_ERR_too_deep, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                          // JMP _error
}
627
// error_invalid_number builds the invalid-number error from the string
// at (SP.p) via error_number and exits through _error.
func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP.p), BX
    self.call_go(_F_error_number)             // CALL_GO error_number
    self.Sjmp("JMP" , _LB_error)              // JMP _error
}
635
// error_nan_or_infinite loads the NaN/Inf error into ET/EP and exits.
func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)        // MOVQ $_V_ERR_nan_or_infinite, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                          // JMP _error
}
642
643/** String Encoding Routine **/
644
/* Native quoter entry point and the Go panic helper. */
var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)
649
// go_panic is the shared panic trampoline; AX holds the panic code on
// entry and SP.p (the offending value pointer) is passed in BX.
func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, _BX)
    self.call_go(_F_panic)
}
655
// encode_string emits the JSON string encoder for the string header at
// (SP.p). doubleQuote selects the escaped ("\"...\"") form used by
// _OP_quote. Empty strings short-circuit; a nil pointer with non-zero
// length panics; otherwise the native quoter is called in a loop that
// grows the buffer until the whole string fits.
func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)  // MOVQ 8(SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_str_empty_{n}")        // JZ _str_empty_{n}
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE"  , "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP", _LB_panic)
    self.Link("_str_next_{n}")

    /* openning quote, check for double quote */
    if !doubleQuote {
        self.check_size_r(_AX, 2)  // SIZE $2
        self.add_char('"')         // CHAR $'"'
    } else {
        self.check_size_r(_AX, 6)  // SIZE $6
        self.add_long(_IM_open, 3) // TEXT $`"\"`
    }

    /* quoting loop */
    self.Emit("XORL", _AX, _AX)      // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_sp)  // MOVQ AX, sp
    self.Link("_str_loop_{n}")       // _str_loop_{n}:
    self.save_c()                    // SAVE $REG_ffi

    /* load the output buffer first, and then input buffer,
     * because the parameter registers collide with RP / RL / RC */
    self.Emit("MOVQ", _RC, _CX)                     // MOVQ RC, CX
    self.Emit("SUBQ", _RL, _CX)                     // SUBQ RL, CX
    self.Emit("MOVQ", _CX, _VAR_dn)                 // MOVQ CX, dn
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
    self.Emit("LEAQ", _VAR_dn, _CX)                 // LEAQ dn, CX
    self.Emit("MOVQ", _VAR_sp, _AX)                 // MOVQ sp, AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)       // MOVQ (SP.p), DI
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)       // MOVQ 8(SP.p), SI
    self.Emit("ADDQ", _AX, _DI)                     // ADDQ AX, DI
    self.Emit("SUBQ", _AX, _SI)                     // SUBQ AX, SI

    /* set the flags based on `doubleQuote` */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)                             // XORL R8, R8
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    }

    /* call the native quoter */
    self.call_c(_F_quote)             // CALL quote
    self.Emit("ADDQ" , _VAR_dn, _RL)  // ADDQ dn, RL

    self.Emit("TESTQ", _AX, _AX)          // TESTQ AX, AX
    self.Sjmp("JS"   , "_str_space_{n}")  // JS _str_space_{n}

    /* close the string, check for double quote */
    if !doubleQuote {
        self.check_size(1)                  // SIZE $1
        self.add_char('"')                  // CHAR $'"'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP _str_end_{n}
    } else {
        self.check_size(3)                  // SIZE $3
        self.add_text("\\\"\"")             // TEXT $'\""'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP _str_end_{n}
    }

    /* not enough space to contain the quoted string */
    self.Link("_str_space_{n}")                      // _str_space_{n}:
    self.Emit("NOTQ", _AX)                           // NOTQ AX
    self.Emit("ADDQ", _AX, _VAR_sp)                  // ADDQ AX, sp
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX)  // LEAQ (RC)(RC), AX
    self.slice_grow_ax("_str_loop_{n}")              // GROW _str_loop_{n}

    /* empty string, check for double quote */
    if !doubleQuote {
        self.Link("_str_empty_{n}")  // _str_empty_{n}:
        self.check_size(2)           // SIZE $2
        self.add_text("\"\"")        // TEXT $'""'
        self.Link("_str_end_{n}")    // _str_end_{n}:
    } else {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(6)              // SIZE $6
        self.add_text("\"\\\"\\\"\"")   // TEXT $'"\"\""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    }
}
739
740/** OpCode Assembler Functions **/
741
/* Marshaler interface types used for dynamic dispatch. */
var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

/* Native formatter entry points. */
var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

/* Go helper functions. */
var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

/* Map-iterator helpers. */
var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

/* Encoder entry points, resolved in init to break initialization cycles. */
var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

/* Mode flag passed to b64encode when AVX2 is available. */
const (
    _MODE_AVX2 = 1 << 2
)
776
// init resolves the encoder function addresses at program start (these
// cannot be initialized at declaration without an initialization cycle).
func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}
782
// _asm_OP_null appends the literal 'null'.
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
}
788
// _asm_OP_empty_arr appends '[]' when the NoNullSliceOrMap flag is set,
// otherwise 'null'.
func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}
800
// _asm_OP_empty_obj appends '{}' when the NoNullSliceOrMap flag is set,
// otherwise 'null'.
func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}
812
// _asm_OP_bool appends 'true' or 'false' based on the byte at (SP.p).
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))              // CMPB (SP.p), $0
    self.Sjmp("JE"  , "_false_{n}")                               // JE _false_{n}
    self.check_size(4)                                            // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
    self.Sjmp("JMP" , "_end_{n}")                                 // JMP _end_{n}
    self.Link("_false_{n}")                                       // _false_{n}:
    self.check_size(5)                                            // SIZE $5
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))      // MOVB $'e', 4(RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(5), _RL)                            // ADDQ $5, RL
    self.Link("_end_{n}")                                         // _end_{n}:
}
827
// _asm_OP_i8 formats a sign-extended int8 (up to 4 bytes incl. sign).
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}
831
// _asm_OP_i16 formats a sign-extended int16 (up to 6 bytes incl. sign).
func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}
835
// _asm_OP_i32 formats a sign-extended int32.
func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}
839
// _asm_OP_i64 formats an int64 (up to 21 bytes reserved).
func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}
843
// _asm_OP_u8 formats a zero-extended uint8 (up to 3 digits).
func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}
847
// _asm_OP_u16 formats a zero-extended uint16 (up to 5 digits).
func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}
851
// _asm_OP_u32 formats a zero-extended uint32.
func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}
855
// _asm_OP_u64 formats a uint64 (up to 20 digits).
func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}
859
// _asm_OP_f32 formats a float32, rejecting NaN / ±Inf (all-ones exponent)
// via the shared error exit.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL"  , jit.Ptr(_SP_p, 0), _AX)  // MOVL (SP.p), AX
    self.Emit("ANDL"  , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX
    self.Emit("XORL"  , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX
    self.Sjmp("JZ"    , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
    self.save_c()                                // SAVE $C_regs
    self.rbuf_di()                               // MOVQ RP, DI
    self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0)  // MOVSS (SP.p), X0
    self.call_c(_F_f32toa)                       // CALL_C f32toa
    self.Emit("ADDQ"  , _AX, _RL)                // ADDQ AX, RL
}
872
// _asm_OP_f64 formats a float64, rejecting NaN / ±Inf (all-ones exponent)
// via the shared error exit.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ"  , jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("MOVQ"  , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX
    self.Emit("ANDQ"  , _CX, _AX)                // ANDQ CX, AX
    self.Emit("XORQ"  , _CX, _AX)                // XORQ CX, AX
    self.Sjmp("JZ"    , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
    self.save_c()                                // SAVE $C_regs
    self.rbuf_di()                               // MOVQ RP, DI
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)  // MOVSD (SP.p), X0
    self.call_c(_F_f64toa)                       // CALL_C f64toa
    self.Emit("ADDQ"  , _AX, _RL)                // ADDQ AX, RL
}
886
// _asm_OP_str encodes a plain JSON string.
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}
890
// _asm_OP_bin base64-encodes the byte slice at (SP.p) as a quoted JSON
// string. The required size ⌈4(n+2)/3⌉ is computed via multiply-by-
// reciprocal (the MULQ clobbers DX == RC, so it is parked in BX).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)        // MOVQ 8(SP.p), AX
    self.Emit("ADDQ", jit.Imm(2), _AX)               // ADDQ $2, AX
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)        // MOVQ $_IM_mulv, CX
    self.Emit("MOVQ", _DX, _BX)                      // MOVQ DX, BX
    self.From("MULQ", _CX)                           // MULQ CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)  // LEAQ 1(DX)(DX), AX
    self.Emit("ORQ" , jit.Imm(2), _AX)               // ORQ $2, AX
    self.Emit("MOVQ", _BX, _DX)                      // MOVQ BX, DX
    self.check_size_r(_AX, 0)                        // SIZE AX
    self.add_char('"')                               // CHAR $'"'
    self.Emit("MOVQ", _ARG_rb, _DI)                  // MOVQ rb<>+0(FP), DI
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))          // MOVQ SI, 8(DI)
    self.Emit("MOVQ", _SP_p, _SI)                    // MOVQ SP.p, SI

    /* check for AVX2 support */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)                  // XORL DX, DX
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)  // MOVL $_MODE_AVX2, DX
    }

    /* call the encoder */
    self.call_b64(_F_b64encode) // CALL b64encode
    self.load_buffer_AX()       // LOAD {buf}
    self.add_char('"')          // CHAR $'"'
}
918
// _asm_OP_quote encodes a string with an extra level of escaping.
func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}
922
// _asm_OP_number validates the json.Number at (SP.p) and copies it
// verbatim into the buffer; an empty number encodes as '0', a nil
// pointer with non-zero length panics, and an invalid number takes the
// invalid-number error exit.
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)  // MOVQ 8(SP.p), BX
    self.Emit("TESTQ", _BX, _BX)                // TESTQ BX, BX
    self.Sjmp("JZ"   , "_empty_{n}")
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JNZ"  , "_number_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")
    self.call_go(_F_isValidNumber)              // CALL_GO isValidNumber
    self.Emit("CMPB" , _AX, jit.Imm(0))         // CMPB AX, $0
    self.Sjmp("JE"   , _LB_error_invalid_number) // JE _error_invalid_number
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)  // MOVQ 8(SP.p), BX
    self.check_size_r(_BX, 0)                   // SIZE BX
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)  // ADDQ 8(SP.p), RL
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX)   // MOVQ (SP.p), BX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX)   // MOVQ 8(SP.p), CX
    self.call_go(_F_memmove)                    // CALL_GO memmove
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))     // MOVQ RL, 8(AX)
    self.Sjmp("JMP"  , "_done_{n}")             // JMP _done_{n}
    self.Link("_empty_{n}")                     // _empty_{n}:
    self.check_size(1)                          // SIZE $1
    self.add_char('0')                          // CHAR $'0'
    self.Link("_done_{n}")                      // _done_{n}:
}
951
// _asm_OP_eface encodes an empty interface by handing its type and
// value-pointer slot to encodeTypedPointer.
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer_AX()                       // MOVE {buf}, AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)  // MOVQ (SP.p), BX
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // LEAQ 8(SP.p), CX
    self.Emit("MOVQ" , _ST, _DI)                // MOVQ ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)            // MOVQ fv, SI
    self.call_encoder(_F_encodeTypedPointer)    // CALL encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
    self.load_buffer_AX()
}
963
// _asm_OP_iface encodes a non-empty interface: the concrete type is
// pulled out of the itab (offset 8) before calling encodeTypedPointer.
func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer_AX()                       // MOVE {buf}, AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)  // MOVQ (SP.p), CX
    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX)    // MOVQ 8(CX), BX
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // LEAQ 8(SP.p), CX
    self.Emit("MOVQ" , _ST, _DI)                // MOVQ ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)            // MOVQ fv, SI
    self.call_encoder(_F_encodeTypedPointer)    // CALL encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
    self.load_buffer_AX()
}
976
// _asm_OP_byte appends the single literal byte p.i64() to the output buffer.
func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))  // MOVB p.vi(), (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(1), _RL)                            // ADDQ $1, RL
}
982
// _asm_OP_text appends the fixed literal text p.vs() to the output buffer.
func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))  // SIZE ${len(p.vs())}
    self.add_text(p.vs())         // TEXT ${p.vs()}
}
987
// _asm_OP_deref dereferences the current value pointer: SP.p = *SP.p.
func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)  // MOVQ (SP.p), SP.p
}
991
// _asm_OP_index advances the value pointer by the constant offset p.i64()
// (e.g. to step to a struct field or array element).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)  // MOVQ $p.vi(), AX
    self.Emit("ADDQ", _AX, _SP_p)             // ADDQ AX, SP.p
}
996
// _asm_OP_load restores SP.x, SP.p and SP.q from the top state-stack frame
// ((ST) holds the current stack offset; fields live at fixed negative offsets).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)              // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)  // MOVQ -24(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)   // MOVQ -8(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)    // MOVQ (ST)(AX), SP.q
}
1003
// _asm_OP_save pushes the current state registers onto the state stack.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}
1007
// _asm_OP_drop pops one state frame from the state stack.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}
1011
// _asm_OP_drop_2 pops two state frames at once, then zeroes the vacated slots
// so stale pointers in them are not kept alive for the GC.
// NOTE(review): relies on X0 being zero at this point (presumably set up by
// drop_state / the prologue) — confirm against the rest of the assembler.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)                    // DROP $(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))  // MOVOU X0, 56(ST)(AX)
}
1016
// _asm_OP_recurse encodes a (possibly recursive) type by calling back into
// encodeTypedPointer with the statically-known type p.vt(). For indirect
// types the value pointer must be passed through a temporary slot so the
// callee receives a pointer-to-pointer.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer_AX()                   // MOVE {buf}, (SP)
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _BX)    // MOVQ $(type(p.vt())), BX

    /* check for indirection */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _CX)       // MOVQ SP.p, CX
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)   // MOVQ SP.p, VAR.vp
        self.Emit("LEAQ", _VAR_vp, _CX)     // LEAQ VAR.vp, CX
    }

    /* call the encoder */
    self.Emit("MOVQ" , _ST, _DI)            // MOVQ ST, DI
    self.Emit("MOVQ" , _ARG_fv, _SI)        // MOVQ $fv, SI
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI)  // BTCQ $1, SI  (toggle the pointer-value flag)
    }
    self.call_encoder(_F_encodeTypedPointer)  // CALL encodeTypedPointer
    self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)             // JNZ _error
    self.load_buffer_AX()
}
1041
// _asm_OP_is_nil jumps to instruction p.vi() when the pointer at SP.p is nil.
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))  // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1046
// _asm_OP_is_nil_p1 jumps to p.vi() when the second word at SP.p (offset 8)
// is zero.
func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))  // CMPQ 8(SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1051
// _asm_OP_is_zero_1 jumps to p.vi() when the 1-byte value at SP.p is zero.
func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))  // CMPB (SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1056
// _asm_OP_is_zero_2 jumps to p.vi() when the 2-byte value at SP.p is zero.
func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))  // CMPW (SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1061
// _asm_OP_is_zero_4 jumps to p.vi() when the 4-byte value at SP.p is zero.
func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))  // CMPL (SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1066
// _asm_OP_is_zero_8 jumps to p.vi() when the 8-byte value at SP.p is zero.
func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))  // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                         // JE p.vi()
}
1071
// _asm_OP_is_zero_map jumps to p.vi() when the map at SP.p is nil, or when it
// is non-nil but its first word (the element count in the runtime map header)
// is zero.
func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)      // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                    // TESTQ AX, AX
    self.Xjmp("JZ"   , p.vi())                      // JZ p.vi()     (nil map)
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0
    self.Xjmp("JE"   , p.vi())                      // JE p.vi()     (empty map)
}
1079
// _asm_OP_goto emits an unconditional jump to instruction p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
1083
// _asm_OP_map_iter starts a map iteration over the map at SP.p via
// iteratorStart; the iterator handle goes into SP.q, and any error returned
// (ET/EP pair) aborts encoding.
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)   // MOVQ $p.vt(), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)  // MOVQ (SP.p), BX
    self.Emit("MOVQ" , _ARG_fv, _CX)            // MOVQ fv, CX
    self.call_go(_F_iteratorStart)              // CALL_GO iteratorStart
    self.Emit("MOVQ" , _AX, _SP_q)              // MOVQ AX, SP.q  (iterator handle)
    self.Emit("MOVQ" , _BX, _ET)                // MOVQ BX, ET    (error type)
    self.Emit("MOVQ" , _CX, _EP)                // MOVQ CX, EP    (error pointer)
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ _error
}
1095
// _asm_OP_map_stop releases the map iterator in SP.q and clears the register.
func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, _AX)    // MOVQ SP.q, AX
    self.call_go(_F_iteratorStop)    // CALL_GO iteratorStop
    self.Emit("XORL", _SP_q, _SP_q)  // XORL SP.q, SP.q
}
1101
// _asm_OP_map_check_key loads the current key pointer from the iterator into
// SP.p; a nil key means iteration is finished, so jump to p.vi().
func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)  // MOVQ (SP.q), SP.p
    self.Emit("TESTQ", _SP_p, _SP_p)              // TESTQ SP.p, SP.p
    self.Xjmp("JZ"   , p.vi())                    // JZ p.vi()
}
1107
// _asm_OP_map_write_key writes the current map key as a JSON string, but only
// when the SortMapKeys flag is NOT set in fv; with sorting enabled the keys
// are emitted elsewhere, so control falls through to p.vi()'s path instead.
func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)  // BTQ ${SortMapKeys}, fv
    self.Sjmp("JNC", "_unordered_key_{n}")              // JNC _unordered_key_{n}
    self.encode_string(false)                           // STR $false
    self.Xjmp("JMP", p.vi())                            // JMP ${p.vi()}
    self.Link("_unordered_key_{n}")                     // _unordered_key_{n}:
}
1115
// _asm_OP_map_value_next loads the current value pointer from the iterator
// into SP.p, then advances the iterator to the next entry.
func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)  // MOVQ 8(SP.q), SP.p
    self.Emit("MOVQ", _SP_q, _AX)                // MOVQ SP.q, AX
    self.call_go(_F_iteratorNext)                // CALL_GO iteratorNext
}
1121
// _asm_OP_slice_len loads a slice header at SP.p: remaining element count
// into SP.x, data pointer into SP.p, and sets the _S_init flag in SP.f so the
// first slice_next does not advance the pointer.
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x)       // MOVQ 8(SP.p), SP.x
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)       // MOVQ (SP.p), SP.p
    self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f)   // ORQ $(1<<_S_init), SP.f
}
1127
// _asm_OP_slice_next steps to the next slice element. When SP.x reaches zero
// it jumps to p.vi() (loop exit). BTRQ tests-and-clears the _S_init flag; the
// CMOVQCC (move if carry clear, i.e. flag was NOT set) advances SP.p by the
// element size only for iterations after the first.
func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)                          // TESTQ SP.x, SP.x
    self.Xjmp("JZ"     , p.vi())                                // JZ p.vi()
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)                     // SUBQ $1, SP.x
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)               // BTRQ $_S_init, SP.f
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // LEAQ $(p.vlen())(SP.p), AX
    self.Emit("CMOVQCC", _AX, _SP_p)                            // CMOVQCC AX, SP.p
}
1136
// _asm_OP_marshal encodes the value via its json.Marshaler implementation.
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}
1140
1141func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {1142if p.vk() != reflect.Ptr {1143panic("marshal_p: invalid type")1144} else {1145self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)1146}1147}
1148
// _asm_OP_marshal_text encodes the value via its encoding.TextMarshaler
// implementation.
func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}
1152
1153func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {1154if p.vk() != reflect.Ptr {1155panic("marshal_text_p: invalid type")1156} else {1157self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)1158}1159}
1160
// _asm_OP_cond_set sets the _S_cond flag in SP.f.
func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)  // ORQ $(1<<_S_cond), SP.f
}
1164
// _asm_OP_cond_testc tests-and-clears the _S_cond flag in SP.f (BTRQ sets CF
// to the old bit value), jumping to p.vi() when the flag was set.
func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)  // BTRQ $_S_cond, SP.f
    self.Xjmp("JC"  , p.vi())
}
1169
// print_gc emits a debug call that prints the instruction index and the
// opcodes of two instructions (arguments passed in AX/BX/CX per the
// register-based Go ABI).
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)  // MOVQ $(p2.op()), CX
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)  // MOVQ $(p1.op()), BX
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)        // MOVQ $(i), AX
    self.call_go(_F_println)
}
1176