// NOTE(review): removed page-scrape residue that preceded the file
// ("podman" and a Russian size caption: "1930 строк · 87.4 Кб" = "1930 lines · 87.4 KB").
//go:build go1.17 && !go1.22
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder
import (
    `encoding/json`
    `fmt`
    `math`
    `reflect`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
)
/** Register Allocations
 *
 * State Registers:
 *
 *     %r13 : stack base
 *     %r10 : input pointer
 *     %r12 : input length
 *     %r11 : input cursor
 *     %r15 : value pointer
 *
 * Error Registers:
 *
 *     %rax : error type register
 *     %rbx : error pointer register
 */

/** Function Prototype & Stack Map
 *
 * func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
 *
 *     s.buf  :   (FP)
 *     s.len  :  8(FP)
 *     ic     : 16(FP)
 *     vp     : 24(FP)
 *     sb     : 32(FP)
 *     fv     : 40(FP)
 *     sv     : 56(FP)
 *     err.vt : 72(FP)
 *     err.vp : 80(FP)
 */

68const (69_FP_args = 72 // 72 bytes to pass and spill register arguements70_FP_fargs = 80 // 80 bytes for passing arguments to other Go functions71_FP_saves = 48 // 48 bytes for saving the registers before CALL instructions72_FP_locals = 144 // 144 bytes for local variables73)
74
75const (76_FP_offs = _FP_fargs + _FP_saves + _FP_locals77_FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer78_FP_base = _FP_size + 8 // 8 bytes for the return address79)
80
81const (82_IM_null = 0x6c6c756e // 'null'83_IM_true = 0x65757274 // 'true'84_IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')85)
86
87const (88_BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')89)
90
91const (92_MODE_JSON = 1 << 3 // base64 mode93)
94
95const (96_LB_error = "_error"97_LB_im_error = "_im_error"98_LB_eof_error = "_eof_error"99_LB_type_error = "_type_error"100_LB_field_error = "_field_error"101_LB_range_error = "_range_error"102_LB_stack_error = "_stack_error"103_LB_base64_error = "_base64_error"104_LB_unquote_error = "_unquote_error"105_LB_parsing_error = "_parsing_error"106_LB_parsing_error_v = "_parsing_error_v"107_LB_mismatch_error = "_mismatch_error"108)
109
110const (111_LB_char_0_error = "_char_0_error"112_LB_char_1_error = "_char_1_error"113_LB_char_2_error = "_char_2_error"114_LB_char_3_error = "_char_3_error"115_LB_char_4_error = "_char_4_error"116_LB_char_m2_error = "_char_m2_error"117_LB_char_m3_error = "_char_m3_error"118)
119
120const (121_LB_skip_one = "_skip_one"122_LB_skip_key_value = "_skip_key_value"123)
124
125var (126_AX = jit.Reg("AX")127_BX = jit.Reg("BX")128_CX = jit.Reg("CX")129_DX = jit.Reg("DX")130_DI = jit.Reg("DI")131_SI = jit.Reg("SI")132_BP = jit.Reg("BP")133_SP = jit.Reg("SP")134_R8 = jit.Reg("R8")135_R9 = jit.Reg("R9")136_X0 = jit.Reg("X0")137_X1 = jit.Reg("X1")138)
139
140var (141_IP = jit.Reg("R10") // saved on BP when callc142_IC = jit.Reg("R11") // saved on BX when call_c143_IL = jit.Reg("R12")144_ST = jit.Reg("R13")145_VP = jit.Reg("R15")146)
147
148var (149_DF = jit.Reg("AX") // reuse AX in generic decoder for flags150_ET = jit.Reg("AX")151_EP = jit.Reg("BX")152)
153
154
155
156var (157_ARG_s = _ARG_sp158_ARG_sp = jit.Ptr(_SP, _FP_base + 0)159_ARG_sl = jit.Ptr(_SP, _FP_base + 8)160_ARG_ic = jit.Ptr(_SP, _FP_base + 16)161_ARG_vp = jit.Ptr(_SP, _FP_base + 24)162_ARG_sb = jit.Ptr(_SP, _FP_base + 32)163_ARG_fv = jit.Ptr(_SP, _FP_base + 40)164)
165
166var (167_ARG_sv = _ARG_sv_p168_ARG_sv_p = jit.Ptr(_SP, _FP_base + 48)169_ARG_sv_n = jit.Ptr(_SP, _FP_base + 56)170_ARG_vk = jit.Ptr(_SP, _FP_base + 64)171)
172
173var (174_VAR_st = _VAR_st_Vt175_VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)176)
177
178var (179_VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)180_VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)181_VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)182_VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)183_VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)184_VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)185)
186
187var (188_VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)189_VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)190_VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)191_VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)192_VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)193)
194
195var (196_VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)197_VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)198_VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)199)
200
201var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)202
203var (204_VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type205_VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save skip return pc206_VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save dismatched position207)
208
209type _Assembler struct {210jit.BaseAssembler211p _Program
212name string213}
214
215func newAssembler(p _Program) *_Assembler {216return new(_Assembler).Init(p)217}
218
219/** Assembler Interface **/
220
221func (self *_Assembler) Load() _Decoder {222return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))223}
224
225func (self *_Assembler) Init(p _Program) *_Assembler {226self.p = p227self.BaseAssembler.Init(self.compile)228return self229}
230
231func (self *_Assembler) compile() {232self.prologue()233self.instrs()234self.epilogue()235self.copy_string()236self.escape_string()237self.escape_string_twice()238self.skip_one()239self.skip_key_value()240self.type_error()241self.mismatch_error()242self.field_error()243self.range_error()244self.stack_error()245self.base64_error()246self.parsing_error()247}
248
249/** Assembler Stages **/
250
251var _OpFuncTab = [256]func(*_Assembler, *_Instr) {252_OP_any : (*_Assembler)._asm_OP_any,253_OP_dyn : (*_Assembler)._asm_OP_dyn,254_OP_str : (*_Assembler)._asm_OP_str,255_OP_bin : (*_Assembler)._asm_OP_bin,256_OP_bool : (*_Assembler)._asm_OP_bool,257_OP_num : (*_Assembler)._asm_OP_num,258_OP_i8 : (*_Assembler)._asm_OP_i8,259_OP_i16 : (*_Assembler)._asm_OP_i16,260_OP_i32 : (*_Assembler)._asm_OP_i32,261_OP_i64 : (*_Assembler)._asm_OP_i64,262_OP_u8 : (*_Assembler)._asm_OP_u8,263_OP_u16 : (*_Assembler)._asm_OP_u16,264_OP_u32 : (*_Assembler)._asm_OP_u32,265_OP_u64 : (*_Assembler)._asm_OP_u64,266_OP_f32 : (*_Assembler)._asm_OP_f32,267_OP_f64 : (*_Assembler)._asm_OP_f64,268_OP_unquote : (*_Assembler)._asm_OP_unquote,269_OP_nil_1 : (*_Assembler)._asm_OP_nil_1,270_OP_nil_2 : (*_Assembler)._asm_OP_nil_2,271_OP_nil_3 : (*_Assembler)._asm_OP_nil_3,272_OP_deref : (*_Assembler)._asm_OP_deref,273_OP_index : (*_Assembler)._asm_OP_index,274_OP_is_null : (*_Assembler)._asm_OP_is_null,275_OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,276_OP_map_init : (*_Assembler)._asm_OP_map_init,277_OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,278_OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,279_OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,280_OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,281_OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,282_OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,283_OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,284_OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,285_OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,286_OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,287_OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,288_OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,289_OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,290_OP_array_skip : (*_Assembler)._asm_OP_array_skip,291_OP_array_clear : (*_Assembler)._asm_OP_array_clear,292_OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,293_OP_slice_init 
: (*_Assembler)._asm_OP_slice_init,294_OP_slice_append : (*_Assembler)._asm_OP_slice_append,295_OP_object_skip : (*_Assembler)._asm_OP_object_skip,296_OP_object_next : (*_Assembler)._asm_OP_object_next,297_OP_struct_field : (*_Assembler)._asm_OP_struct_field,298_OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,299_OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,300_OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,301_OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,302_OP_lspace : (*_Assembler)._asm_OP_lspace,303_OP_match_char : (*_Assembler)._asm_OP_match_char,304_OP_check_char : (*_Assembler)._asm_OP_check_char,305_OP_load : (*_Assembler)._asm_OP_load,306_OP_save : (*_Assembler)._asm_OP_save,307_OP_drop : (*_Assembler)._asm_OP_drop,308_OP_drop_2 : (*_Assembler)._asm_OP_drop_2,309_OP_recurse : (*_Assembler)._asm_OP_recurse,310_OP_goto : (*_Assembler)._asm_OP_goto,311_OP_switch : (*_Assembler)._asm_OP_switch,312_OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,313_OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,314_OP_go_skip : (*_Assembler)._asm_OP_go_skip,315_OP_add : (*_Assembler)._asm_OP_add,316_OP_check_empty : (*_Assembler)._asm_OP_check_empty,317_OP_debug : (*_Assembler)._asm_OP_debug,318}
319
320func (self *_Assembler) _asm_OP_debug(_ *_Instr) {321self.Byte(0xcc)322}
323
324func (self *_Assembler) instr(v *_Instr) {325if fn := _OpFuncTab[v.op()]; fn != nil {326fn(self, v)327} else {328panic(fmt.Sprintf("invalid opcode: %d", v.op()))329}330}
331
332func (self *_Assembler) instrs() {333for i, v := range self.p {334self.Mark(i)335self.instr(&v)336self.debug_instr(i, &v)337}338}
339
340func (self *_Assembler) epilogue() {341self.Mark(len(self.p))342self.Emit("XORL", _EP, _EP) // XORL EP, EP343self.Emit("MOVQ", _VAR_et, _ET) // MOVQ VAR_et, ET344self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET345self.Sjmp("JNZ", _LB_mismatch_error) // JNZ _LB_mismatch_error346self.Link(_LB_error) // _error:347self.Emit("MOVQ", _EP, _CX) // MOVQ BX, CX348self.Emit("MOVQ", _ET, _BX) // MOVQ AX, BX349self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX350self.Emit("MOVQ", jit.Imm(0), _ARG_sp) // MOVQ $0, sv.p<>+48(FP)351self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ $0, sv.p<>+48(FP)352self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p) // MOVQ $0, sv.p<>+48(FP)353self.Emit("MOVQ", jit.Imm(0), _ARG_vk) // MOVQ $0, vk<>+64(FP)354self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP355self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP356self.Emit("RET") // RET357}
358
359func (self *_Assembler) prologue() {360self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP361self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)362self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP363self.Emit("MOVQ", _AX, _ARG_sp) // MOVQ AX, s.p<>+0(FP)364self.Emit("MOVQ", _AX, _IP) // MOVQ AX, IP365self.Emit("MOVQ", _BX, _ARG_sl) // MOVQ BX, s.l<>+8(FP)366self.Emit("MOVQ", _BX, _IL) // MOVQ BX, IL367self.Emit("MOVQ", _CX, _ARG_ic) // MOVQ CX, ic<>+16(FP)368self.Emit("MOVQ", _CX, _IC) // MOVQ CX, IC369self.Emit("MOVQ", _DI, _ARG_vp) // MOVQ DI, vp<>+24(FP)370self.Emit("MOVQ", _DI, _VP) // MOVQ DI, VP371self.Emit("MOVQ", _SI, _ARG_sb) // MOVQ SI, sb<>+32(FP)372self.Emit("MOVQ", _SI, _ST) // MOVQ SI, ST373self.Emit("MOVQ", _R8, _ARG_fv) // MOVQ R8, fv<>+40(FP)374self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p) // MOVQ $0, sv.p<>+48(FP)375self.Emit("MOVQ", jit.Imm(0), _ARG_sv_n) // MOVQ $0, sv.n<>+56(FP)376self.Emit("MOVQ", jit.Imm(0), _ARG_vk) // MOVQ $0, vk<>+64(FP)377self.Emit("MOVQ", jit.Imm(0), _VAR_et) // MOVQ $0, et<>+120(FP)378// initialize digital buffer first379self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap380self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX381self.Emit("MOVQ", _AX, _VAR_st_Db) // MOVQ AX, ss.Dbuf382}
383
384/** Function Calling Helpers **/
385
386var (387_REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }388_REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }389)
390
391func (self *_Assembler) save(r ...obj.Addr) {392for i, v := range r {393if i > _FP_saves / 8 - 1 {394panic("too many registers to save")395} else {396self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))397}398}399}
400
401func (self *_Assembler) load(r ...obj.Addr) {402for i, v := range r {403if i > _FP_saves / 8 - 1 {404panic("too many registers to load")405} else {406self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)407}408}409}
410
411func (self *_Assembler) call(fn obj.Addr) {412self.Emit("MOVQ", fn, _R9) // MOVQ ${fn}, R11413self.Rjmp("CALL", _R9) // CALL R11414}
415
416func (self *_Assembler) call_go(fn obj.Addr) {417self.save(_REG_go...) // SAVE $REG_go418self.call(fn)419self.load(_REG_go...) // LOAD $REG_go420}
421
422func (self *_Assembler) callc(fn obj.Addr) {423self.save(_IP)424self.call(fn)425self.load(_IP)426}
427
428func (self *_Assembler) call_c(fn obj.Addr) {429self.Emit("XCHGQ", _IC, _BX)430self.callc(fn)431self.Emit("XCHGQ", _IC, _BX)432}
433
434func (self *_Assembler) call_sf(fn obj.Addr) {435self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI436self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)437self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI438self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX439self.Emit("MOVQ", _ARG_fv, _CX)440self.callc(fn)441self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC442}
443
444func (self *_Assembler) call_vf(fn obj.Addr) {445self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI446self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)447self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI448self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX449self.callc(fn)450self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC451}
452
453/** Assembler Error Handlers **/
454
455var (456_F_convT64 = jit.Func(convT64)457_F_error_wrap = jit.Func(error_wrap)458_F_error_type = jit.Func(error_type)459_F_error_field = jit.Func(error_field)460_F_error_value = jit.Func(error_value)461_F_error_mismatch = jit.Func(error_mismatch)462)
463
464var (465_I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))466_I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))467_I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))468_I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))469_I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))470_I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))471_I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))472)
473
474var (475_T_error = rt.UnpackType(errorType)476_I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)477)
478
479var (480_V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))481_I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))482_I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))483)
484
485func (self *_Assembler) type_error() {486self.Link(_LB_type_error) // _type_error:487self.call_go(_F_error_type) // CALL_GO error_type488self.Sjmp("JMP" , _LB_error) // JMP _error489}
490
491func (self *_Assembler) mismatch_error() {492self.Link(_LB_mismatch_error) // _type_error:493self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET494self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP495self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchType, CX496self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX497self.Sjmp("JE" , _LB_error) // JE _LB_error498self.Emit("MOVQ", _ARG_sp, _AX)499self.Emit("MOVQ", _ARG_sl, _BX)500self.Emit("MOVQ", _VAR_ic, _CX)501self.Emit("MOVQ", _VAR_et, _DI)502self.call_go(_F_error_mismatch) // CALL_GO error_type503self.Sjmp("JMP" , _LB_error) // JMP _error504}
505
506func (self *_Assembler) field_error() {507self.Link(_LB_field_error) // _field_error:508self.Emit("MOVQ", _ARG_sv_p, _AX) // MOVQ sv.p, AX509self.Emit("MOVQ", _ARG_sv_n, _BX) // MOVQ sv.n, BX510self.call_go(_F_error_field) // CALL_GO error_field511self.Sjmp("JMP" , _LB_error) // JMP _error512}
513
514func (self *_Assembler) range_error() {515self.Link(_LB_range_error) // _range_error:516self.Emit("MOVQ", _ET, _CX) // MOVQ ET, CX517self.slice_from(_VAR_st_Ep, 0) // SLICE st.Ep, $0518self.Emit("MOVQ", _DI, _AX) // MOVQ DI, AX519self.Emit("MOVQ", _EP, _DI) // MOVQ EP, DI520self.Emit("MOVQ", _SI, _BX) // MOVQ SI, BX521self.call_go(_F_error_value) // CALL_GO error_value522self.Sjmp("JMP" , _LB_error) // JMP _error523}
524
525func (self *_Assembler) stack_error() {526self.Link(_LB_stack_error) // _stack_error:527self.Emit("MOVQ", _V_stackOverflow, _EP) // MOVQ ${_V_stackOverflow}, EP528self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET529self.Sjmp("JMP" , _LB_error) // JMP _error530}
531
532func (self *_Assembler) base64_error() {533self.Link(_LB_base64_error)534self.Emit("NEGQ", _AX) // NEGQ AX535self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX536self.call_go(_F_convT64) // CALL_GO convT64537self.Emit("MOVQ", _AX, _EP) // MOVQ AX, EP538self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET539self.Sjmp("JMP" , _LB_error) // JMP _error540}
541
542func (self *_Assembler) parsing_error() {543self.Link(_LB_eof_error) // _eof_error:544self.Emit("MOVQ" , _IL, _IC) // MOVQ IL, IC545self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP) // MOVL ${types.ERR_EOF}, EP546self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error547self.Link(_LB_unquote_error) // _unquote_error:548self.Emit("SUBQ" , _VAR_sr, _SI) // SUBQ sr, SI549self.Emit("SUBQ" , _SI, _IC) // SUBQ IL, IC550self.Link(_LB_parsing_error_v) // _parsing_error_v:551self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP552self.Emit("NEGQ" , _EP) // NEGQ EP553self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error554self.Link(_LB_char_m3_error) // _char_m3_error:555self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC556self.Link(_LB_char_m2_error) // _char_m2_error:557self.Emit("SUBQ" , jit.Imm(2), _IC) // SUBQ $2, IC558self.Sjmp("JMP" , _LB_char_0_error) // JMP _char_0_error559self.Link(_LB_im_error) // _im_error:560self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPB CX, (IP)(IC)561self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error562self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX563self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1)) // CMPB CX, 1(IP)(IC)564self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error565self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX566self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2)) // CMPB CX, 2(IP)(IC)567self.Sjmp("JNE" , _LB_char_2_error) // JNE _char_2_error568self.Sjmp("JMP" , _LB_char_3_error) // JNE _char_3_error569self.Link(_LB_char_4_error) // _char_4_error:570self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC571self.Link(_LB_char_3_error) // _char_3_error:572self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC573self.Link(_LB_char_2_error) // _char_2_error:574self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC575self.Link(_LB_char_1_error) // _char_1_error:576self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC577self.Link(_LB_char_0_error) // _char_0_error:578self.Emit("MOVL" , 
jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP579self.Link(_LB_parsing_error) // _parsing_error:580self.Emit("MOVQ" , _EP, _DI) // MOVQ EP, DI581self.Emit("MOVQ", _ARG_sp, _AX) // MOVQ sp, AX582self.Emit("MOVQ", _ARG_sl, _BX) // MOVQ sl, BX583self.Emit("MOVQ" , _IC, _CX) // MOVQ IC, CX584self.call_go(_F_error_wrap) // CALL_GO error_wrap585self.Sjmp("JMP" , _LB_error) // JMP _error586}
587
588func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {589self.Emit("MOVQ", _IC, _VAR_ic)590self.Emit("MOVQ", jit.Type(p.vt()), _ET)591self.Emit("MOVQ", _ET, _VAR_et)592}
593
594func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {595self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9596self.Xref(p.vi(), 4)597// self.Byte(0xcc)598self.Emit("MOVQ", _R9, _VAR_pc)599self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one600}
601
602func (self *_Assembler) skip_one() {603self.Link(_LB_skip_one) // _skip:604self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC605self.call_sf(_F_skip_one) // CALL_SF skip_one606self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX607self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v608self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9609// self.Byte(0xcc)610self.Rjmp("JMP" , _R9) // JMP (R9)611}
612
613func (self *_Assembler) skip_key_value() {614self.Link(_LB_skip_key_value) // _skip:615// skip the key616self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC617self.call_sf(_F_skip_one) // CALL_SF skip_one618self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX619self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v620// match char ':'621self.lspace("_global_1")622self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))623self.Sjmp("JNE" , _LB_parsing_error_v) // JNE _parse_error_v624self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC625self.lspace("_global_2")626// skip the value627self.call_sf(_F_skip_one) // CALL_SF skip_one628self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX629self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v630// jump back to specified address631self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9632self.Rjmp("JMP" , _R9) // JMP (R9)633}
634
635
636/** Memory Management Routines **/
637
638var (639_T_byte = jit.Type(byteType)640_F_mallocgc = jit.Func(mallocgc)641)
642
643func (self *_Assembler) malloc_AX(nb obj.Addr, ret obj.Addr) {644self.Emit("MOVQ", nb, _AX) // MOVQ ${nb}, AX645self.Emit("MOVQ", _T_byte, _BX) // MOVQ ${type(byte)}, BX646self.Emit("XORL", _CX, _CX) // XORL CX, CX647self.call_go(_F_mallocgc) // CALL_GO mallocgc648self.Emit("MOVQ", _AX, ret) // MOVQ AX, ${ret}649}
650
651func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {652self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX653self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ ${vt}, BX654self.Emit("MOVB", jit.Imm(1), _CX) // MOVB $1, CX655self.call_go(_F_mallocgc) // CALL_GO mallocgc656self.Emit("MOVQ", _AX, ret) // MOVQ AX, ${ret}657}
658
659func (self *_Assembler) valloc_AX(vt reflect.Type) {660self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX661self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ ${vt}, BX662self.Emit("MOVB", jit.Imm(1), _CX) // MOVB $1, CX663self.call_go(_F_mallocgc) // CALL_GO mallocgc664}
665
666func (self *_Assembler) vfollow(vt reflect.Type) {667self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX668self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX669self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}670self.valloc_AX(vt) // VALLOC ${vt}, AX671self.WritePtrAX(1, jit.Ptr(_VP, 0), true) // MOVQ AX, (VP)672self.Link("_end_{n}") // _end_{n}:673self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP674}
675
676/** Value Parsing Routines **/
677
678var (679_F_vstring = jit.Imm(int64(native.S_vstring))680_F_vnumber = jit.Imm(int64(native.S_vnumber))681_F_vsigned = jit.Imm(int64(native.S_vsigned))682_F_vunsigned = jit.Imm(int64(native.S_vunsigned))683)
684
685func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {686self.Emit("MOVQ" , _VAR_st_Vt, _AX) // MOVQ st.Vt, AX687self.Emit("TESTQ", _AX, _AX) // CMPQ AX, ${native.V_STRING}688// try to skip the value689if vt != nil {690self.Sjmp("JNS" , "_check_err_{n}") // JNE _parsing_error_v691self.Emit("MOVQ", jit.Type(vt), _ET)692self.Emit("MOVQ", _ET, _VAR_et)693if pin2 != -1 {694self.Emit("SUBQ", jit.Imm(1), _BX)695self.Emit("MOVQ", _BX, _VAR_ic)696self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9697self.Xref(pin2, 4)698self.Emit("MOVQ", _R9, _VAR_pc)699self.Sjmp("JMP" , _LB_skip_key_value)700} else {701self.Emit("MOVQ", _BX, _VAR_ic)702self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9703self.Sref(pin, 4)704self.Emit("MOVQ", _R9, _VAR_pc)705self.Sjmp("JMP" , _LB_skip_one)706}707self.Link("_check_err_{n}")708} else {709self.Sjmp("JS" , _LB_parsing_error_v) // JNE _parsing_error_v710}711}
712
713func (self *_Assembler) check_eof(d int64) {714if d == 1 {715self.Emit("CMPQ", _IC, _IL) // CMPQ IC, IL716self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error717} else {718self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX719self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL720self.Sjmp("JA" , _LB_eof_error) // JA _eof_error721}722}
723
724
725func (self *_Assembler) parse_string() {726self.Emit("MOVQ", _ARG_fv, _CX)727self.call_vf(_F_vstring)728self.check_err(nil, "", -1)729}
730
731func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {732self.Emit("MOVQ", _IC, _BX) // save ic when call native func733self.call_vf(_F_vnumber)734self.check_err(vt, pin, pin2)735}
736
737func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {738self.Emit("MOVQ", _IC, _BX) // save ic when call native func739self.call_vf(_F_vsigned)740self.check_err(vt, pin, pin2)741}
742
743func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {744self.Emit("MOVQ", _IC, _BX) // save ic when call native func745self.call_vf(_F_vunsigned)746self.check_err(vt, pin, pin2)747}
748
749// Pointer: DI, Size: SI, Return: R9
750func (self *_Assembler) copy_string() {751self.Link("_copy_string")752self.Emit("MOVQ", _DI, _VAR_bs_p)753self.Emit("MOVQ", _SI, _VAR_bs_n)754self.Emit("MOVQ", _R9, _VAR_bs_LR)755self.malloc_AX(_SI, _ARG_sv_p)756self.Emit("MOVQ", _VAR_bs_p, _BX)757self.Emit("MOVQ", _VAR_bs_n, _CX)758self.call_go(_F_memmove)759self.Emit("MOVQ", _ARG_sv_p, _DI)760self.Emit("MOVQ", _VAR_bs_n, _SI)761self.Emit("MOVQ", _VAR_bs_LR, _R9)762self.Rjmp("JMP", _R9)763}
764
765// Pointer: DI, Size: SI, Return: R9
766func (self *_Assembler) escape_string() {767self.Link("_escape_string")768self.Emit("MOVQ" , _DI, _VAR_bs_p)769self.Emit("MOVQ" , _SI, _VAR_bs_n)770self.Emit("MOVQ" , _R9, _VAR_bs_LR)771self.malloc_AX(_SI, _DX) // MALLOC SI, DX772self.Emit("MOVQ" , _DX, _ARG_sv_p)773self.Emit("MOVQ" , _VAR_bs_p, _DI)774self.Emit("MOVQ" , _VAR_bs_n, _SI)775self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX776self.Emit("XORL" , _R8, _R8) // XORL R8, R8777self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv778self.Emit("SETCC", _R8) // SETCC R8779self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8780self.call_c(_F_unquote) // CALL unquote781self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI782self.Emit("ADDQ" , jit.Imm(1), _SI) // ADDQ $1, SI783self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX784self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error785self.Emit("MOVQ" , _AX, _SI)786self.Emit("MOVQ" , _ARG_sv_p, _DI)787self.Emit("MOVQ" , _VAR_bs_LR, _R9)788self.Rjmp("JMP", _R9)789}
790
791func (self *_Assembler) escape_string_twice() {792self.Link("_escape_string_twice")793self.Emit("MOVQ" , _DI, _VAR_bs_p)794self.Emit("MOVQ" , _SI, _VAR_bs_n)795self.Emit("MOVQ" , _R9, _VAR_bs_LR)796self.malloc_AX(_SI, _DX) // MALLOC SI, DX797self.Emit("MOVQ" , _DX, _ARG_sv_p)798self.Emit("MOVQ" , _VAR_bs_p, _DI)799self.Emit("MOVQ" , _VAR_bs_n, _SI)800self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX801self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8802self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, AX803self.Emit("XORL" , _AX, _AX) // XORL AX, AX804self.Emit("SETCC", _AX) // SETCC AX805self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ ${types.B_UNICODE_REPLACE}, AX806self.Emit("ORQ" , _AX, _R8) // ORQ AX, R8807self.call_c(_F_unquote) // CALL unquote808self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI809self.Emit("ADDQ" , jit.Imm(3), _SI) // ADDQ $3, SI810self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX811self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error812self.Emit("MOVQ" , _AX, _SI)813self.Emit("MOVQ" , _ARG_sv_p, _DI)814self.Emit("MOVQ" , _VAR_bs_LR, _R9)815self.Rjmp("JMP", _R9)816}
817
818/** Range Checking Routines **/
819
820var (821_V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))822_V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))823)
824
825var (826_Vp_max_f32 = new(float32)827_Vp_min_f32 = new(float32)828)
829
830func init() {831*_Vp_max_f32 = math.MaxFloat32832*_Vp_min_f32 = -math.MaxFloat32833}
834
835func (self *_Assembler) range_single_X0() {836self.Emit("CVTSD2SS", _VAR_st_Dv, _X0) // CVTSD2SS _VAR_st_Dv, X0837self.Emit("MOVQ" , _V_max_f32, _CX) // MOVQ _max_f32, CX838self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET839self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP840self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0) // UCOMISS (CX), X0841self.Sjmp("JA" , _LB_range_error) // JA _range_error842self.Emit("MOVQ" , _V_min_f32, _CX) // MOVQ _min_f32, CX843self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0) // UCOMISS (CX), X0844self.Sjmp("JB" , _LB_range_error) // JB _range_error845}
846
847func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {848self.Emit("MOVQ", _VAR_st_Iv, _CX) // MOVQ st.Iv, CX849self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET850self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP851self.Emit("CMPQ", _CX, jit.Imm(a)) // CMPQ CX, ${a}852self.Sjmp("JL" , _LB_range_error) // JL _range_error853self.Emit("CMPQ", _CX, jit.Imm(b)) // CMPQ CX, ${B}854self.Sjmp("JG" , _LB_range_error) // JG _range_error855}
856
857func (self *_Assembler) range_unsigned_CX(i *rt.GoItab, t *rt.GoType, v uint64) {858self.Emit("MOVQ" , _VAR_st_Iv, _CX) // MOVQ st.Iv, CX859self.Emit("MOVQ" , jit.Gitab(i), _ET) // MOVQ ${i}, ET860self.Emit("MOVQ" , jit.Gtype(t), _EP) // MOVQ ${t}, EP861self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX862self.Sjmp("JS" , _LB_range_error) // JS _range_error863self.Emit("CMPQ" , _CX, jit.Imm(int64(v))) // CMPQ CX, ${a}864self.Sjmp("JA" , _LB_range_error) // JA _range_error865}
866
867/** String Manipulating Routines **/
868
869var (870_F_unquote = jit.Imm(int64(native.S_unquote))871)
872
873func (self *_Assembler) slice_from(p obj.Addr, d int64) {874self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI875self.slice_from_r(_SI, d) // SLICE_R SI, ${d}876}
877
878func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {879self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI880self.Emit("NEGQ", p) // NEGQ ${p}881self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI882}
883
884func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {885self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1886self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1887self.Sjmp("JE" , "_noescape_{n}") // JE _escape_{n}888self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9889self.Sref("_unquote_once_write_{n}", 4)890self.Sjmp("JMP" , "_escape_string")891self.Link("_noescape_{n}")892if copy {893self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)894self.Sjmp("JNC", "_unquote_once_write_{n}")895self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9896self.Sref("_unquote_once_write_{n}", 4)897self.Sjmp("JMP", "_copy_string")898}899self.Link("_unquote_once_write_{n}")900self.Emit("MOVQ", _SI, n) // MOVQ SI, ${n}901if stack {902self.Emit("MOVQ", _DI, p)903} else {904self.WriteRecNotAX(10, _DI, p, false, false)905}906}
907
908func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {909self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1910self.Sjmp("JE" , _LB_eof_error) // JE _eof_error911self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\'912self.Sjmp("JNE" , _LB_char_m3_error) // JNE _char_m3_error913self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"')) // CMPB -2(IP)(IC), $'"'914self.Sjmp("JNE" , _LB_char_m2_error) // JNE _char_m2_error915self.slice_from(_VAR_st_Iv, -3) // SLICE st.Iv, $-3916self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX917self.Emit("ADDQ" , _VAR_st_Iv, _AX) // ADDQ st.Iv, AX918self.Emit("CMPQ" , _VAR_st_Ep, _AX) // CMPQ st.Ep, AX919self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n}920self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9921self.Sref("_unquote_twice_write_{n}", 4)922self.Sjmp("JMP" , "_escape_string_twice")923self.Link("_noescape_{n}") // _noescape_{n}:924self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)925self.Sjmp("JNC", "_unquote_twice_write_{n}")926self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9927self.Sref("_unquote_twice_write_{n}", 4)928self.Sjmp("JMP", "_copy_string")929self.Link("_unquote_twice_write_{n}")930self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n}931if stack {932self.Emit("MOVQ", _DI, p)933} else {934self.WriteRecNotAX(12, _DI, p, false, false)935}936self.Link("_unquote_twice_end_{n}")937}
938
939/** Memory Clearing Routines **/
940
// Runtime memory-clearing routines, bound as JIT-callable addresses.
var (
    _F_memclrHasPointers    = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)
945
946func (self *_Assembler) mem_clear_fn(ptrfree bool) {947if !ptrfree {948self.call_go(_F_memclrHasPointers)949} else {950self.call_go(_F_memclrNoHeapPointers)951}952}
953
// mem_clear_rem zeroes the remainder of the current container: from VP up to
// the end-pointer saved at the top of the decoder stack, plus size extra
// bytes. Used by array_clear to wipe trailing elements the input omitted.
func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _BX)               // MOVQ ${size}, BX
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)     // MOVQ (ST)(AX), AX
    self.Emit("SUBQ", _VP, _AX)                         // SUBQ VP, AX
    self.Emit("ADDQ", _AX, _BX)                         // ADDQ AX, BX
    self.Emit("MOVQ", _VP, _AX)                         // MOVQ VP, AX
    self.mem_clear_fn(ptrfree)                          // CALL_GO memclr{Has,NoHeap}Pointers
}
963
964/** Map Assigning Routines **/
965
// Runtime map-assignment entry points, bound as JIT-callable addresses.
var (
    _F_mapassign           = jit.Func(mapassign)
    _F_mapassign_fast32    = jit.Func(mapassign_fast32)
    _F_mapassign_faststr   = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)
972
// Addresses of the json.Unmarshaler / encoding.TextUnmarshaler dispatch
// helpers; resolved in init() because jit.Func needs the final symbol address.
var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)

func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
982
983func (self *_Assembler) mapaccess_ptr(t reflect.Type) {984if rt.MapType(rt.UnpackType(t)).IndirectElem() {985self.vfollow(t.Elem())986}987}
988
// mapassign_std assigns a key located at v via the generic runtime.mapassign.
func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX)                       // LEAQ ${v}, AX
    self.mapassign_call_from_AX(t, _F_mapassign)    // MAPASSIGN ${t}, mapassign
}
993
// mapassign_str_fast assigns a string key (pointer p, length n) via the
// string-specialized runtime.mapassign_faststr, leaving the element slot in VP.
func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX)     // MOVQ ${t}, AX
    self.Emit("MOVQ", _VP, _BX)             // MOVQ VP, BX
    self.Emit("MOVQ", p, _CX)               // MOVQ ${p}, CX
    self.Emit("MOVQ", n, _DI)               // MOVQ ${n}, DI
    self.call_go(_F_mapassign_faststr)      // CALL_GO mapassign_faststr
    self.Emit("MOVQ", _AX, _VP)             // MOVQ AX, VP
    self.mapaccess_ptr(t)
}
1003
// mapassign_call_from_AX calls a mapassign-style runtime function with the
// key (already in AX) as the third argument, and moves the returned element
// pointer into VP.
func (self *_Assembler) mapassign_call_from_AX(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", _AX, _CX)             // MOVQ AX, CX
    self.Emit("MOVQ", jit.Type(t), _AX)     // MOVQ ${t}, AX
    self.Emit("MOVQ", _VP, _BX)             // MOVQ VP, BX
    self.call_go(fn)                        // CALL_GO ${fn}
    self.Emit("MOVQ", _AX, _VP)             // MOVQ AX, VP
}
1011
// mapassign_fastx performs a fast-path map assignment (key in AX) and then
// dereferences the element slot if the map stores elements indirectly.
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call_from_AX(t, fn)
    self.mapaccess_ptr(t)
}
1016
// mapassign_utext decodes a map key that implements encoding.TextUnmarshaler:
// it allocates a fresh key value, invokes UnmarshalText on the scanned string
// (sv.p / sv.n), then assigns the key with the generic or fast64ptr map
// assignment depending on whether the key type is a pointer.
func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    /* deref pointer if needed */
    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    /* addressable value with pointer receiver */
    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    /* allocate the key, and call the unmarshaler */
    self.valloc(vk, _BX)                        // VALLOC  ${vk}, BX
    // must spill vk pointer since next call_go may invoke GC
    self.Emit("MOVQ" , _BX, _ARG_vk)
    self.Emit("MOVQ" , jit.Type(tk), _AX)       // MOVQ    ${tk}, AX
    self.Emit("MOVQ" , _ARG_sv_p, _CX)          // MOVQ    sv.p, CX
    self.Emit("MOVQ" , _ARG_sv_n, _DI)          // MOVQ    sv.n, DI
    self.call_go(_F_decodeTextUnmarshaler)      // CALL_GO decodeTextUnmarshaler
    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
    self.Emit("MOVQ" , _ARG_vk, _AX)            // MOVQ    VAR.vk, AX
    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)      // clear the spill slot

    /* select the correct assignment function */
    if !pv {
        self.mapassign_call_from_AX(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}
1054
1055/** External Unmarshaler Routines **/
1056
// Native (C) value-skipping subroutines, referenced by entry offset.
var (
    _F_skip_one    = jit.Imm(int64(native.S_skip_one))
    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
    _F_skip_object = jit.Imm(int64(native.S_skip_object))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)
1063
// unmarshal_json skips one complete JSON value, records it as the source
// string (sv.p / sv.n), and hands it to the type's UnmarshalJSON method.
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one)                               // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                            // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)                 // JS      _parse_error_v
    self.slice_from_r(_AX, 0)                               // SLICE_R AX, $0
    self.Emit("MOVQ" , _DI, _ARG_sv_p)                      // MOVQ    DI, sv.p
    self.Emit("MOVQ" , _SI, _ARG_sv_n)                      // MOVQ    SI, sv.n
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
}
1073
// unmarshal_text parses and unquotes a JSON string, then hands it to the
// type's UnmarshalText method.
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string()                                     // PARSE STRING
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE once, sv.p, sv.n
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
}
1079
// unmarshal_func invokes an unmarshaler dispatch helper fn on the value at VP
// with the source string in sv.p / sv.n. When deref is set and t is a pointer
// type, it allocates the pointee first if the pointer at VP is nil.
func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    /* allocate the field if needed */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ" , _VP, _BX)                            // MOVQ   VP, BX
        self.Emit("MOVQ" , jit.Ptr(_BX, 0), _BX)                // MOVQ   (BX), BX
        self.Emit("TESTQ", _BX, _BX)                            // TESTQ  BX, BX
        self.Sjmp("JNZ"  , "_deref_{n}")                        // JNZ    _deref_{n}
        self.valloc(t.Elem(), _BX)                              // VALLOC ${t.Elem()}, BX
        self.WriteRecNotAX(3, _BX, jit.Ptr(_VP, 0), false, false)   // MOVQ BX, (VP)
        self.Link("_deref_{n}")                                 // _deref_{n}:
    } else {
        /* set value pointer */
        self.Emit("MOVQ", _VP, _BX)                             // MOVQ   VP, BX
    }

    /* set value type */
    self.Emit("MOVQ", jit.Type(pt), _AX)                        // MOVQ   ${pt}, AX

    /* set the source string and call the unmarshaler */
    self.Emit("MOVQ" , _ARG_sv_p, _CX)                          // MOVQ   sv.p, CX
    self.Emit("MOVQ" , _ARG_sv_n, _DI)                          // MOVQ   sv.n, DI
    self.call_go(fn)                                            // CALL_GO ${fn}
    self.Emit("TESTQ", _ET, _ET)                                // TESTQ  ET, ET
    self.Sjmp("JNZ"  , _LB_error)                               // JNZ    _error
}
1108
1109/** Dynamic Decoding Routine **/
1110
// Address of the generic typed-pointer decoder used for interface{} /
// dynamic values; resolved in init().
var (
    _F_decodeTypedPointer obj.Addr
)

func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}
1118
// decode_dynamic calls decodeTypedPointer for a runtime-determined type vt
// with destination slot vp. State registers are saved/restored around the
// call. A MismatchTypeError is deferred (recorded in VAR_ic / VAR_et) so
// decoding can continue; any other error aborts via _LB_error.
func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ" , vp, _SI)                         // MOVQ ${vp}, SI
    self.Emit("MOVQ" , vt, _DI)                         // MOVQ ${vt}, DI
    self.Emit("MOVQ", _ARG_sp, _AX)                     // MOVQ sp, AX
    self.Emit("MOVQ", _ARG_sl, _BX)                     // MOVQ sl, BX
    self.Emit("MOVQ" , _IC, _CX)                        // MOVQ IC, CX
    self.Emit("MOVQ" , _ST, _R8)                        // MOVQ ST, R8
    self.Emit("MOVQ" , _ARG_fv, _R9)                    // MOVQ fv, R9
    self.save(_REG_rt...)
    self.Emit("MOVQ", _F_decodeTypedPointer, _IL)       // MOVQ ${fn}, R11
    self.Rjmp("CALL", _IL)                              // CALL R11
    self.load(_REG_rt...)
    self.Emit("MOVQ" , _AX, _IC)                        // MOVQ AX, IC
    self.Emit("MOVQ" , _BX, _ET)                        // MOVQ BX, ET
    self.Emit("MOVQ" , _CX, _EP)                        // MOVQ CX, EP
    self.Emit("TESTQ", _ET, _ET)                        // TESTQ ET, ET
    self.Sjmp("JE", "_decode_dynamic_end_{n}")          // JE   _decode_dynamic_end_{n}
    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX)   // MOVQ _I_json_MismatchTypeError, CX
    self.Emit("CMPQ", _ET, _CX)                         // CMPQ ET, CX
    self.Sjmp("JNE", _LB_error)                         // JNE  _error
    self.Emit("MOVQ", _EP, _VAR_ic)                     // MOVQ EP, VAR_ic
    self.Emit("MOVQ", _ET, _VAR_et)                     // MOVQ ET, VAR_et
    self.Link("_decode_dynamic_end_{n}")
}
1143
1144/** OpCode Assembler Functions **/
1145
/* runtime helpers bound as JIT-callable addresses */
var (
    _F_memequal         = jit.Func(memequal)
    _F_memmove          = jit.Func(memmove)
    _F_growslice        = jit.Func(growslice)
    _F_makeslice        = jit.Func(makeslice)
    _F_makemap_small    = jit.Func(makemap_small)
    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

/* native / assembly subroutine entry offsets */
var (
    _F_lspace  = jit.Imm(int64(native.S_lspace))
    _F_strhash = jit.Imm(int64(caching.S_strhash))
)

var (
    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

// _Zero_Base is the pointer used for empty (but non-nil) slices.
var (
    _F_FieldMap_GetCaseInsensitive obj.Addr
    _Empty_Slice                   = []byte{}
    _Zero_Base                     = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
    _MODE_AVX2 = 1 << 2
)

/* field-entry layout offsets used by the struct-field hash probe */
const (
    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

const (
    _Vk_Ptr       = int64(reflect.Ptr)
    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)

func init() {
    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
1189
// _asm_OP_any decodes into an interface{} slot. If the interface already
// holds a non-nil pointer value (and is not self-referential), it decodes
// in-place via decode_dynamic; otherwise it falls back to the generic
// decodeValue subroutine.
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
    self.Emit("MOVQ"   , jit.Ptr(_VP, 8), _CX)              // MOVQ    8(VP), CX
    self.Emit("TESTQ"  , _CX, _CX)                          // TESTQ   CX, CX
    self.Sjmp("JZ"     , "_decode_{n}")                     // JZ      _decode_{n}
    self.Emit("CMPQ"   , _CX, _VP)                          // CMPQ    CX, VP
    self.Sjmp("JE"     , "_decode_{n}")                     // JE      _decode_{n}
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
    self.Sjmp("JNE"    , "_decode_{n}")                     // JNE     _decode_{n}
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
    self.Sjmp("JMP"    , "_decode_end_{n}")                 // JMP     _decode_end_{n}
    self.Link("_decode_{n}")                                // _decode_{n}:
    self.Emit("MOVQ"   , _ARG_fv, _DF)                      // MOVQ    fv, DF
    self.Emit("MOVQ"   , _ST, jit.Ptr(_SP, 0))              // MOVQ    ST, (SP)
    self.call(_F_decodeValue)                               // CALL    decodeValue
    self.Emit("MOVQ"   , jit.Imm(0), jit.Ptr(_SP, 0))       // MOVQ    $0, (SP)
    self.Emit("TESTQ"  , _EP, _EP)                          // TESTQ   EP, EP
    self.Sjmp("JNZ"    , _LB_parsing_error)                 // JNZ     _parsing_error
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}
1213
// _asm_OP_dyn decodes into a non-empty interface: the dynamic type must
// already be set and must be a pointer kind, otherwise it's a type error.
func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
    self.Emit("MOVQ"   , jit.Type(p.vt()), _ET)             // MOVQ    ${p.vt()}, ET
    self.Emit("CMPQ"   , jit.Ptr(_VP, 8), jit.Imm(0))       // CMPQ    8(VP), $0
    self.Sjmp("JE"     , _LB_type_error)                    // JE      _type_error
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _CX)              // MOVQ    (VP), CX
    self.Emit("MOVQ"   , jit.Ptr(_CX, 8), _CX)              // MOVQ    8(CX), CX
    self.Emit("MOVBLZX", jit.Ptr(_CX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(CX), DX
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
    self.Sjmp("JNE"    , _LB_type_error)                    // JNE     _type_error
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
    self.decode_dynamic(_CX, _DI)                           // DECODE  CX, DI
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}
1228
// _asm_OP_str decodes a JSON string directly into the string header at VP.
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.parse_string()                                             // PARSE STRING
    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)    // UNQUOTE once, (VP), 8(VP)
}
1233
// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at VP:
// it slices the raw string, allocates a buffer of cap = len/4*3, swaps the
// buffer into the slice header, and calls the native b64decode routine.
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.parse_string()                                 // PARSE  STRING
    self.slice_from(_VAR_st_Iv, -1)                     // SLICE  st.Iv, $-1
    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))            // MOVQ   DI, (VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))            // MOVQ   SI, 8(VP)
    self.Emit("SHRQ" , jit.Imm(2), _SI)                 // SHRQ   $2, SI
    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)    // LEAQ   (SI)(SI*2), SI
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ   SI, 16(VP)
    self.malloc_AX(_SI, _SI)                            // MALLOC SI, SI

    // TODO: due to base64x's bug, only use AVX mode now
    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)         // MOVL $_MODE_JSON, CX

    /* call the decoder */
    self.Emit("XORL" , _DX, _DX)                        // XORL  DX, DX
    self.Emit("MOVQ" , _VP, _DI)                        // MOVQ  VP, DI

    // swap the freshly allocated buffer into (VP), keeping the old source
    // pointer in SI for the decoder
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R8)            // MOVQ  (VP), R8
    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)    // XCHGQ SI, (VP)
    self.Emit("MOVQ" , _R8, _SI)

    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))            // XCHGQ DX, 8(VP)
    self.call_c(_F_b64decode)                           // CALL  b64decode
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ AX, AX
    self.Sjmp("JS"   , _LB_base64_error)                // JS    _base64_error
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))            // MOVQ  AX, 8(VP)
}
1261
// _asm_OP_bool matches the literals "true" / "false" and stores 1 or 0 into
// the byte at VP. Any other token records a deferred mismatch (type bool)
// and skips the value.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                     // LEAQ 4(IC), AX
    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))    // CMPB (IP)(IC), $'f'
    self.Sjmp("JE"  , "_false_{n}")                             // JE   _false_{n}
    self.Emit("MOVL", jit.Imm(_IM_true), _CX)                   // MOVL $"true", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
    self.Sjmp("JE" , "_bool_true_{n}")
    // try to skip the value
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", _T_bool, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)                                 // LEAQ (PC), R9
    self.Sref("_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    self.Link("_bool_true_{n}")
    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))              // MOVB $1, (VP)
    self.Sjmp("JMP" , "_end_{n}")                               // JMP  _end_{n}
    self.Link("_false_{n}")                                     // _false_{n}:
    self.Emit("ADDQ", jit.Imm(1), _AX)                          // ADDQ $1, AX
    self.Emit("ADDQ", jit.Imm(1), _IC)                          // ADDQ $1, IC
    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)                   // MOVL $"alse", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
    self.Sjmp("JNE" , _LB_im_error)                             // JNE  _im_error
    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                     // MOVB AX, (VP)
    self.Link("_end_{n}")                                       // _end_{n}:
}
1297
// _asm_OP_num decodes a json.Number: it accepts either a bare number or a
// quoted number (tracked by VAR_fl), validates it with the native
// skip_number routine, and stores the raw text as a string at VP
// (optionally copying it out of the input buffer).
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Emit("MOVQ", _IC, _BX)
    self.Sjmp("JNE", "_skip_number_{n}")
    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)      // remember the opening quote
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_skip_number_{n}")

    /* call skip_number */
    self.Emit("LEAQ", _ARG_s, _DI)              // LEAQ  s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic)             // MOVQ  IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI)             // LEAQ  ic<>+16(FP), SI
    self.callc(_F_skip_number)                  // CALL  _F_skip_number
    self.Emit("MOVQ", _ARG_ic, _IC)             // MOVQ  ic<>+16(FP), IC
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JNS" , "_num_next_{n}")

    /* call skip one */
    self.Emit("MOVQ", _BX, _VAR_ic)
    self.Emit("MOVQ", _T_number, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)                 // LEAQ (PC), R9
    self.Sref("_num_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    /* assign string */
    self.Link("_num_next_{n}")
    self.slice_from_r(_AX, 0)
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_num_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)                 // LEAQ (PC), R9
    self.Sref("_num_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_num_write_{n}")
    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))     // MOVQ SI, 8(VP)
    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
    // a quoted number must be terminated by a closing quote
    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
    self.Sjmp("JNE", "_num_end_{n}")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Sjmp("JNE", _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_num_end_{n}")
}
1343
// _asm_OP_i8 parses a signed integer, range-checks to int8, stores 1 byte at VP.
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    var pin = "_i8_end_{n}"
    self.parse_signed(int8Type, pin, -1)                                // PARSE int8
    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)  // RANGE int8
    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                             // MOVB  CX, (VP)
    self.Link(pin)
}
1351
// _asm_OP_i16 parses a signed integer, range-checks to int16, stores 2 bytes at VP.
func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    var pin = "_i16_end_{n}"
    self.parse_signed(int16Type, pin, -1)                                   // PARSE int16
    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)  // RANGE int16
    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                                 // MOVW  CX, (VP)
    self.Link(pin)
}
1359
// _asm_OP_i32 parses a signed integer, range-checks to int32, stores 4 bytes at VP.
func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    var pin = "_i32_end_{n}"
    self.parse_signed(int32Type, pin, -1)                                   // PARSE int32
    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)  // RANGE int32
    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                                 // MOVL  CX, (VP)
    self.Link(pin)
}
1367
// _asm_OP_i64 parses a signed integer and stores the full 8 bytes at VP
// (no range check needed for int64).
func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    var pin = "_i64_end_{n}"
    self.parse_signed(int64Type, pin, -1)       // PARSE int64
    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
    self.Link(pin)
}
1375
// _asm_OP_u8 parses an unsigned integer, range-checks to uint8, stores 1 byte at VP.
func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    var pin = "_u8_end_{n}"
    self.parse_unsigned(uint8Type, pin, -1)                     // PARSE uint8
    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)   // RANGE uint8
    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                     // MOVB  CX, (VP)
    self.Link(pin)
}
1383
// _asm_OP_u16 parses an unsigned integer, range-checks to uint16, stores 2 bytes at VP.
func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    var pin = "_u16_end_{n}"
    self.parse_unsigned(uint16Type, pin, -1)                        // PARSE uint16
    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)    // RANGE uint16
    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                         // MOVW  CX, (VP)
    self.Link(pin)
}
1391
// _asm_OP_u32 parses an unsigned integer, range-checks to uint32, stores 4 bytes at VP.
func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    var pin = "_u32_end_{n}"
    self.parse_unsigned(uint32Type, pin, -1)                        // PARSE uint32
    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)    // RANGE uint32
    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                         // MOVL  CX, (VP)
    self.Link(pin)
}
1399
// _asm_OP_u64 parses an unsigned integer and stores the full 8 bytes at VP
// (no range check needed for uint64).
func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    var pin = "_u64_end_{n}"
    self.parse_unsigned(uint64Type, pin, -1)    // PARSE uint64
    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
    self.Link(pin)
}
1407
// _asm_OP_f32 parses a number, range-checks to float32, stores 4 bytes at VP.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    var pin = "_f32_end_{n}"
    self.parse_number(float32Type, pin, -1)     // PARSE NUMBER
    self.range_single_X0()                      // RANGE float32
    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))    // MOVSS X0, (VP)
    self.Link(pin)
}
1415
// _asm_OP_f64 parses a number and stores the float64 value at VP.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    var pin = "_f64_end_{n}"
    self.parse_number(float64Type, pin, -1)     // PARSE NUMBER
    self.Emit("MOVSD", _VAR_st_Dv, _X0)         // MOVSD st.Dv, X0
    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))    // MOVSD X0, (VP)
    self.Link(pin)
}
1423
// _asm_OP_unquote consumes an opening `\"` sequence then parses and
// double-unquotes the string (the `,string` field option).
func (self *_Assembler) _asm_OP_unquote(_ *_Instr) {
    self.check_eof(2)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB (IP)(IC), $'\\'
    self.Sjmp("JNE" , _LB_char_0_error)                         // JNE  _char_0_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB 1(IP)(IC), $'"'
    self.Sjmp("JNE" , _LB_char_1_error)                         // JNE  _char_1_error
    self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ $2, IC
    self.parse_string()                                         // PARSE STRING
    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
}
1434
// _asm_OP_nil_1 zeroes one machine word at VP (e.g. a pointer or int).
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
    self.Emit("XORL", _AX, _AX)                 // XORL AX, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ AX, (VP)
}
1439
// _asm_OP_nil_2 zeroes two machine words at VP (e.g. a string or interface).
func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
}
1444
// _asm_OP_nil_3 zeroes three machine words at VP (e.g. a slice header).
func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
    self.Emit("XORL" , _AX, _AX)                // XORL  AX, AX
    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))   // MOVQ  AX, 16(VP)
}
1451
// _asm_OP_deref follows the pointer at VP, allocating the pointee if nil.
func (self *_Assembler) _asm_OP_deref(p *_Instr) {
    self.vfollow(p.vt())
}
1455
// _asm_OP_index advances VP by a constant byte offset (struct field / array element).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ ${p.i64()}, AX
    self.Emit("ADDQ", _AX, _VP)                 // ADDQ AX, VP
}
1460
// _asm_OP_is_null checks for the literal "null"; if matched, consumes it and
// branches to instruction p.vi().
func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
    self.Emit("LEAQ"   , jit.Ptr(_IC, 4), _AX)                      // LEAQ    4(IC), AX
    self.Emit("CMPQ"   , _AX, _IL)                                  // CMPQ    AX, IL
    self.Sjmp("JA"     , "_not_null_{n}")                           // JA      _not_null_{n}
    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL (IP)(IC), $"null"
    self.Emit("CMOVQEQ", _AX, _IC)                                  // CMOVQEQ AX, IC
    self.Xjmp("JE"     , p.vi())                                    // JE      {p.vi()}
    self.Link("_not_null_{n}")                                      // _not_null_{n}:
}
1470
// _asm_OP_is_null_quote checks for the 5-byte sequence `null"` (a null value
// inside a quoted field); if matched, consumes it and branches to p.vi().
func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
    self.Emit("LEAQ"   , jit.Ptr(_IC, 5), _AX)                      // LEAQ    5(IC), AX
    self.Emit("CMPQ"   , _AX, _IL)                                  // CMPQ    AX, IL
    self.Sjmp("JA"     , "_not_null_quote_{n}")                     // JA      _not_null_quote_{n}
    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL (IP)(IC), $"null"
    self.Sjmp("JNE"    , "_not_null_quote_{n}")                     // JNE     _not_null_quote_{n}
    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))     // CMPB    4(IP)(IC), $'"'
    self.Emit("CMOVQEQ", _AX, _IC)                                  // CMOVQEQ AX, IC
    self.Xjmp("JE"     , p.vi())                                    // JE      {p.vi()}
    self.Link("_not_null_quote_{n}")                                // _not_null_quote_{n}:
}
1482
// _asm_OP_map_init allocates the map if the pointer at VP is nil, then makes
// VP point at the map header itself.
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ     _end_{n}
    self.call_go(_F_makemap_small)              // CALL_GO makemap_small
    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)  // MOVQ    AX, (VP)
    self.Link("_end_{n}")                       // _end_{n}:
    self.Emit("MOVQ" , _AX, _VP)                // MOVQ    AX, VP
}
1492
// _asm_OP_map_key_i8 parses a quoted int8 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi())                             // PARSE int8
    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)  // RANGE int8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                              // MAPASSIGN int8, mapassign, st.Iv
}
1499
// _asm_OP_map_key_i16 parses a quoted int16 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi())                                // PARSE int16
    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)  // RANGE int16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                                  // MAPASSIGN int16, mapassign, st.Iv
}
1506
// _asm_OP_map_key_i32 parses a quoted int32 map key, using the fast32 path
// when the map layout permits it.
func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi())                                // PARSE int32
    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)  // RANGE int32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                  // MAPASSIGN int32, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _CX, _AX)                         // MOVQ CX, AX
        self.mapassign_fastx(vt, _F_mapassign_fast32)       // MAPASSIGN int32, mapassign_fast32
    }
}
1518
// _asm_OP_map_key_i64 parses a quoted int64 map key, using the fast64 path
// when the map layout permits it.
func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi())                // PARSE int64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                  // MAPASSIGN int64, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)                  // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)       // MAPASSIGN int64, mapassign_fast64
    }
}
1529
// _asm_OP_map_key_u8 parses a quoted uint8 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi())                  // PARSE uint8
    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)   // RANGE uint8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                      // MAPASSIGN uint8, st.Iv
}
1536
// _asm_OP_map_key_u16 parses a quoted uint16 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi())                     // PARSE uint16
    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)    // RANGE uint16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                          // MAPASSIGN uint16, st.Iv
}
1543
// _asm_OP_map_key_u32 parses a quoted uint32 map key, using the fast32 path
// when the map layout permits it.
func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi())                     // PARSE uint32
    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)    // RANGE uint32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                  // MAPASSIGN uint32, st.Iv
    } else {
        self.Emit("MOVQ", _CX, _AX)                         // MOVQ CX, AX
        self.mapassign_fastx(vt, _F_mapassign_fast32)       // MAPASSIGN uint32, mapassign_fast32
    }
}
1555
// _asm_OP_map_key_u64 parses a quoted uint64 map key, using the fast64 path
// when the map layout permits it.
func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi())             // PARSE uint64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                  // MAPASSIGN uint64, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)                  // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)       // MAPASSIGN uint64, mapassign_fast64
    }
}
1566
// _asm_OP_map_key_f32 parses a quoted float32 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi())  // PARSE NUMBER
    self.range_single_X0()                      // RANGE float32
    self.Emit("MOVSS", _X0, _VAR_st_Dv)         // MOVSS X0, st.Dv
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)      // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
1574
// _asm_OP_map_key_f64 parses a quoted float64 map key and assigns it via mapassign.
func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi())  // PARSE NUMBER
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)      // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
1580
// _asm_OP_map_key_str parses a string map key; it uses mapassign_faststr when
// the layout permits, otherwise copies the string header into a freshly
// allocated key and uses the generic mapassign.
func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string()                                     // PARSE     STRING
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)
        self.Emit("MOVOU", _ARG_sv, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))             // MAPASSIGN string, mapassign
    } else {
        self.mapassign_str_fast(vt, _ARG_sv_p, _ARG_sv_n)   // MAPASSIGN string, mapassign_faststr
    }
}
1593
// _asm_OP_map_key_utext parses a string key and decodes it through the key
// type's encoding.TextUnmarshaler (value receiver form).
func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string()                                     // PARSE     STRING
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
    self.mapassign_utext(p.vt(), false)                     // MAPASSIGN utext, ${p.vt()}, false
}
1599
// _asm_OP_map_key_utext_p parses a string key and decodes it through the key
// type's encoding.TextUnmarshaler (addressable / pointer-receiver form).
func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string()                                     // PARSE     STRING
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
    self.mapassign_utext(p.vt(), true)                      // MAPASSIGN utext, ${p.vt()}, true
}
1605
// _asm_OP_array_skip skips over a complete JSON array without decoding it.
func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array)             // CALL_SF skip_array
    self.Emit("TESTQ", _AX, _AX)            // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v) // JS      _parse_error_v
}
1611
// _asm_OP_array_clear zeroes the unfilled tail of a fixed-size array whose
// element type contains no pointers.
func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}
1615
// _asm_OP_array_clear_p zeroes the unfilled tail of a fixed-size array whose
// element type contains pointers (so the GC-aware memclr is used).
func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}
1619
// _asm_OP_slice_init resets the slice length to 0 and, when the capacity is
// 0, allocates a minimum-capacity backing array with makeslice.
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX)                // XORL    AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _BX)   // MOVQ    16(VP), BX
    self.Emit("TESTQ", _BX, _BX)                // TESTQ   BX, BX
    self.Sjmp("JNZ"  , "_done_{n}")             // JNZ     _done_{n}
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX) // MOVQ    ${_MinSlice}, CX
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))   // MOVQ    CX, 16(VP)
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)   // MOVQ    ${p.vt()}, AX
    self.call_go(_F_makeslice)                  // CALL_GO makeslice
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)  // MOVQ    AX, (VP)
    self.Emit("XORL" , _AX, _AX)                // XORL    AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP)
    self.Link("_done_{n}")                      // _done_{n}:
}
1635
// _asm_OP_check_empty fast-paths an empty array `[]`: it writes an empty
// (non-nil, zero-base) slice header and jumps past the element-decoding loop.
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                             // LEAQ 1(IC), AX
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))    // CMPB (IP)(IC), ']'
        self.Sjmp("JNE" , "_not_empty_array_{n}")                           // JNE  _not_empty_array_{n}
        self.Emit("MOVQ", _AX, _IC)                                         // MOVQ AX, IC
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR", _X0, _X0)                                         // PXOR  X0, X0
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))                            // MOVOU X0, 8(VP)
        self.Xjmp("JMP" , p.vi())                                           // JMP  {p.vi()}
        self.Link("_not_empty_array_{n}")
    } else {
        panic("only implement check empty array here!")
    }
}
1654
// _asm_OP_slice_append makes room for one more element: grows the slice via
// growslice when len == cap, zeroes the newly exposed region for pointer-free
// element types (growslice doesn't), then points VP at the new element.
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)            // MOVQ    8(VP), AX
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))           // CMPQ    AX, 16(VP)
    self.Sjmp("JB"   , "_index_{n}")                    // JB      _index_{n}
    self.Emit("MOVQ" , _AX, _SI)                        // MOVQ    AX, SI
    self.Emit("SHLQ" , jit.Imm(1), _SI)                 // SHLQ    $1, SI
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)           // MOVQ    ${p.vt()}, AX
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _BX)            // MOVQ    (VP), BX
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)            // MOVQ    8(VP), CX
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _DI)           // MOVQ    16(VP), DI
    self.call_go(_F_growslice)                          // CALL_GO growslice
    self.WritePtrAX(8, jit.Ptr(_VP, 0), false)          // MOVQ    AX, (VP)
    self.Emit("MOVQ" , _BX, jit.Ptr(_VP, 8))            // MOVQ    BX, 8(VP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))           // MOVQ    CX, 16(VP)

    // growslice does not zero the {oldcap, newcap} region when the element
    // type has no pointer data, but we must, to avoid decoding random values.
    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("MOVQ" , _CX, _DI)                            // MOVQ CX, DI
        self.Emit("SUBQ" , _BX, _DI)                            // SUBQ BX, DI

        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))         // ADDQ $1, 8(VP)
        self.Emit("MOVQ" , _AX, _VP)                            // MOVQ AX, VP
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)       // MOVQ ${p.vlen()}, CX
        self.Emit("MOVQ" , _BX, _AX)                            // MOVQ BX, AX
        self.From("MULQ" , _CX)                                 // MULQ CX
        self.Emit("ADDQ" , _AX, _VP)                            // ADDQ AX, VP

        self.Emit("MOVQ" , _DI, _AX)                            // MOVQ DI, AX
        self.From("MULQ" , _CX)                                 // MULQ CX
        self.Emit("MOVQ" , _AX, _BX)                            // MOVQ AX, BX
        self.Emit("MOVQ" , _VP, _AX)                            // MOVQ VP, AX
        self.mem_clear_fn(true)                                 // CALL_GO memclrNoHeapPointers
        self.Sjmp("JMP", "_append_slice_end_{n}")
    }

    self.Emit("MOVQ" , _BX, _AX)                        // MOVQ BX, AX
    self.Link("_index_{n}")                             // _index_{n}:
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ $1, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)            // MOVQ (VP), VP
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ ${p.vlen()}, CX
    self.From("MULQ" , _CX)                             // MULQ CX
    self.Emit("ADDQ" , _AX, _VP)                        // ADDQ AX, VP
    self.Link("_append_slice_end_{n}")
}
1700
// _asm_OP_object_skip skips over a complete JSON object without decoding it.
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object)            // CALL_SF skip_object
    self.Emit("TESTQ", _AX, _AX)            // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v) // JS      _parse_error_v
}
1706
// _asm_OP_object_next skips one JSON value (an unwanted field value).
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one)               // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)            // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v) // JS      _parse_error_v
}
1712
// _asm_OP_struct_field resolves a struct field name to its field index
// (stored in VAR_sr, or -1 for unknown). It probes the precomputed open-
// addressing hash table (32-byte FieldEntry slots) with strhash + memequal,
// falling back to a case-insensitive lookup; unknown fields error only when
// the disable_unknown flag is set.
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX)                        // MOVQ    $-1, AX
    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, sr
    self.parse_string()                                         // PARSE   STRING
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, false)        // UNQUOTE once, sv.p, sv.n
    self.Emit("LEAQ" , _ARG_sv, _AX)                            // LEAQ    sv, AX
    self.Emit("XORL" , _BX, _BX)                                // XORL    BX, BX
    self.call_go(_F_strhash)                                    // CALL_GO strhash
    self.Emit("MOVQ" , _AX, _R9)                                // MOVQ    AX, R9
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)      // MOVQ    ${p.vf()}, CX
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)   // MOVQ    FieldMap.b(CX), SI
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)   // MOVQ    FieldMap.N(CX), CX
    self.Emit("TESTQ", _CX, _CX)                                // TESTQ   CX, CX
    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
    self.Link("_loop_{n}")                                      // _loop_{n}:
    self.Emit("XORL" , _DX, _DX)                                // XORL    DX, DX
    self.From("DIVQ" , _CX)                                     // DIVQ    CX
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)                    // LEAQ    1(DX), AX
    self.Emit("SHLQ" , jit.Imm(5), _DX)                         // SHLQ    $5, DX
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)            // LEAQ    (SI)(DX), DI
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)             // MOVQ    FieldEntry.Hash(DI), R8
    self.Emit("TESTQ", _R8, _R8)                                // TESTQ   R8, R8
    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
    self.Emit("CMPQ" , _R8, _R9)                                // CMPQ    R8, R9
    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX
    self.Emit("CMPQ" , _DX, _ARG_sv_n)                          // CMPQ    DX, sv.n
    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)               // MOVQ    FieldEntry.ID(DI), R8
    // spill probe state around the memequal call
    self.Emit("MOVQ" , _AX, _VAR_ss_AX)                         // MOVQ    AX, ss.AX
    self.Emit("MOVQ" , _CX, _VAR_ss_CX)                         // MOVQ    CX, ss.CX
    self.Emit("MOVQ" , _SI, _VAR_ss_SI)                         // MOVQ    SI, ss.SI
    self.Emit("MOVQ" , _R8, _VAR_ss_R8)                         // MOVQ    R8, ss.R8
    self.Emit("MOVQ" , _R9, _VAR_ss_R9)                         // MOVQ    R9, ss.R9
    self.Emit("MOVQ" , _ARG_sv_p, _AX)                          // MOVQ    sv.p, AX
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)             // MOVQ    FieldEntry.Name(DI), CX
    self.Emit("MOVQ" , _CX, _BX)                                // MOVQ    CX, BX
    self.Emit("MOVQ" , _DX, _CX)                                // MOVQ    DX, CX
    self.call_go(_F_memequal)                                   // CALL_GO memequal
    self.Emit("MOVB" , _AX, _DX)                                // MOVB    AX, DX
    self.Emit("MOVQ" , _VAR_ss_AX, _AX)                         // MOVQ    ss.AX, AX
    self.Emit("MOVQ" , _VAR_ss_CX, _CX)                         // MOVQ    ss.CX, CX
    self.Emit("MOVQ" , _VAR_ss_SI, _SI)                         // MOVQ    ss.SI, SI
    self.Emit("MOVQ" , _VAR_ss_R9, _R9)                         // MOVQ    ss.R9, R9
    self.Emit("TESTB", _DX, _DX)                                // TESTB   DX, DX
    self.Sjmp("JZ"   , "_loop_{n}")                             // JZ      _loop_{n}
    self.Emit("MOVQ" , _VAR_ss_R8, _R8)                         // MOVQ    ss.R8, R8
    self.Emit("MOVQ" , _R8, _VAR_sr)                            // MOVQ    R8, sr
    self.Sjmp("JMP"  , "_end_{n}")                              // JMP     _end_{n}
    self.Link("_try_lowercase_{n}")                             // _try_lowercase_{n}:
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)   // MOVQ    ${p.vf()}, AX
    self.Emit("MOVQ", _ARG_sv_p, _BX)                           // MOVQ    sv.p, BX
    self.Emit("MOVQ", _ARG_sv_n, _CX)                           // MOVQ    sv.n, CX
    self.call_go(_F_FieldMap_GetCaseInsensitive)                // CALL_GO FieldMap::GetCaseInsensitive
    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, sr
    self.Emit("TESTQ", _AX, _AX)                                // TESTQ   AX, AX
    self.Sjmp("JNS"  , "_end_{n}")                              // JNS     _end_{n}
    self.Emit("BTQ"  , jit.Imm(_F_disable_unknown), _ARG_fv)    // BTQ     ${_F_disable_unknown}, fv
    self.Sjmp("JC"   , _LB_field_error)                         // JC      _field_error
    self.Link("_end_{n}")                                       // _end_{n}:
}
1775
1776func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {1777self.unmarshal_json(p.vt(), true)1778}
1779
1780func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {1781self.unmarshal_json(p.vt(), false)1782}
1783
1784func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {1785self.unmarshal_text(p.vt(), true)1786}
1787
1788func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {1789self.unmarshal_text(p.vt(), false)1790}
1791
1792func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {1793self.lspace("_{n}")1794}
1795
// lspace emits code that advances the input cursor (IC) past JSON whitespace.
// It inlines up to 4 single-byte checks against the _BM_space bitmap (a byte
// <= ' ' whose bit is set in the bitmap is whitespace), then falls back to the
// native lspace routine for longer runs. Jumps to the EOF handler if the input
// is exhausted while skipping. Execution lands on the "_lspace"+subfix label
// as soon as a non-space byte is current.
func (self *_Assembler) lspace(subfix string) {
    var label = "_lspace" + subfix
    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)       // MOVQ    _BM_space, DX
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
    self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
    self.Sjmp("JA"     , label)                         // JA      _lspace{subfix}   (byte > ' ' is never whitespace)
    self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
    self.Sjmp("JNC"    , label)                         // JNC     _lspace{subfix}   (bit clear: not whitespace)

    /* test up to 4 characters inline before calling the native helper */
    for i := 0; i < 3; i++ {
        self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
        self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
        self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
        self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
        self.Sjmp("JA"     , label)                         // JA      _lspace{subfix}
        self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
        self.Sjmp("JNC"    , label)                         // JNC     _lspace{subfix}
    }

    /* hand over to the native function for longer whitespace runs */
    self.Emit("MOVQ"   , _IP, _DI)              // MOVQ    IP, DI
    self.Emit("MOVQ"   , _IL, _SI)              // MOVQ    IL, SI
    self.Emit("MOVQ"   , _IC, _DX)              // MOVQ    IC, DX
    self.callc(_F_lspace)                       // CALL    lspace
    self.Emit("TESTQ"  , _AX, _AX)              // TESTQ   AX, AX
    self.Sjmp("JS"     , _LB_parsing_error_v)   // JS      _parsing_error_v  (negative return = parse error)
    self.Emit("CMPQ"   , _AX, _IL)              // CMPQ    AX, IL
    self.Sjmp("JAE"    , _LB_eof_error)         // JAE     _eof_error
    self.Emit("MOVQ"   , _AX, _IC)              // MOVQ    AX, IC            (IC = first non-space position)
    self.Link(label)                            // _lspace{subfix}:
}
1831
1832func (self *_Assembler) _asm_OP_match_char(p *_Instr) {1833self.match_char(p.vb())1834}
1835
1836func (self *_Assembler) match_char(char byte) {1837self.check_eof(1)1838self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()}1839self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error1840self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC1841}
1842
1843func (self *_Assembler) _asm_OP_check_char(p *_Instr) {1844self.check_eof(1)1845self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX1846self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}1847self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC1848self.Xjmp("JE" , p.vi()) // JE {p.vi()}1849}
1850
1851func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {1852self.check_eof(1)1853self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}1854self.Xjmp("JE" , p.vi()) // JE {p.vi()}1855}
1856
1857func (self *_Assembler) _asm_OP_add(p *_Instr) {1858self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC1859}
1860
1861func (self *_Assembler) _asm_OP_load(_ *_Instr) {1862self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX1863self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP1864}
1865
1866func (self *_Assembler) _asm_OP_save(_ *_Instr) {1867self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX1868self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes)) // CMPQ CX, ${_MaxStackBytes}1869self.Sjmp("JAE" , _LB_stack_error) // JA _stack_error1870self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)1871self.Emit("ADDQ", jit.Imm(8), _CX) // ADDQ $8, CX1872self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0)) // MOVQ CX, (ST)1873}
1874
1875func (self *_Assembler) _asm_OP_drop(_ *_Instr) {1876self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX1877self.Emit("SUBQ", jit.Imm(8), _AX) // SUBQ $8, AX1878self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP1879self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)1880self.Emit("XORL", _BX, _BX) // XORL BX, BX1881self.Emit("MOVQ", _BX, jit.Sib(_ST, _AX, 1, 8)) // MOVQ BX, 8(ST)(AX)1882}
1883
1884func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {1885self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX1886self.Emit("SUBQ" , jit.Imm(16), _AX) // SUBQ $16, AX1887self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP1888self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)1889self.Emit("PXOR" , _X0, _X0) // PXOR X0, X01890self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)1891}
1892
1893func (self *_Assembler) _asm_OP_recurse(p *_Instr) {1894self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX1895self.decode_dynamic(_AX, _VP) // DECODE AX, VP1896}
1897
1898func (self *_Assembler) _asm_OP_goto(p *_Instr) {1899self.Xjmp("JMP", p.vi())1900}
1901
// _asm_OP_switch emits a computed jump on the field index in sr. An in-range
// index selects a target through a PC-relative table of 32-bit offsets; an
// out-of-range index (including the -1 "unknown field" sentinel, which is
// >= len(p.vs()) under the unsigned JAE compare) falls through to the default
// label.
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX)             // MOVQ sr, AX
    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))    // CMPQ AX, ${len(p.vs())}
    self.Sjmp("JAE" , "_default_{n}")           // JAE  _default_{n}

    /* jump table selector: DI = &table, AX = table[AX] (sign-extended), jump to DI+AX */
    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ    ?(PC), DI
    self.Sref("_switch_table_{n}", 4)                   // ....    &_switch_table_{n}
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // MOVLQSX (DI)(AX*4), AX
    self.Emit("ADDQ"   , _DI, _AX)                      // ADDQ    DI, AX
    self.Rjmp("JMP"    , _AX)                           // JMP     AX
    self.Link("_switch_table_{n}")                      // _switch_table_{n}:

    /* generate the jump table: entry i is the offset of target v from its own slot */
    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }

    /* default case: a NOP to link the label onto */
    self.Link("_default_{n}")
    self.NOP()
}
1924
1925func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {1926self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)// MOVQ $(p2.op()), 16(SP)1927self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX) // MOVQ $(p1.op()), 8(SP)1928self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), (SP)1929self.call_go(_F_println)1930}
1931