// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `encoding/json`
    `fmt`
    `math`
    `reflect`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
)

/** Register Allocations
 *
 *  State Registers:
 *
 *      %rbx : stack base
 *      %r12 : input pointer
 *      %r13 : input length
 *      %r14 : input cursor
 *      %r15 : value pointer
 *
 *  Error Registers:
 *
 *      %r10 : error type register
 *      %r11 : error pointer register
 */

/** Function Prototype & Stack Map
 *
 *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
 *
 *  s.buf  :   (FP)
 *  s.len  :  8(FP)
 *  ic     : 16(FP)
 *  vp     : 24(FP)
 *  sb     : 32(FP)
 *  fv     : 40(FP)
 *  sv     : 56(FP)
 *  err.vt : 72(FP)
 *  err.vp : 80(FP)
 */

const (
    _FP_args   = 96  // 96 bytes to pass arguments and return values for this function
    _FP_fargs  = 80  // 80 bytes for passing arguments to other Go functions
    _FP_saves  = 40  // 40 bytes for saving the registers before CALL instructions
    _FP_locals = 144 // 144 bytes for local variables
)

const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8 // 8 bytes for the return address
)
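// For reference, a worked sketch of the frame layout implied by the constants
// above (plain arithmetic, not part of the original comments): _FP_offs =
// 80 + 40 + 144 = 264, _FP_size = 272 and _FP_base = 280, so the caller's
// arguments accessed below as jit.Ptr(_SP, _FP_base+n) sit at 280(SP),
// 288(SP), ... once the prologue has reserved the frame.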

const (
    _IM_null = 0x6c6c756e // 'null'
    _IM_true = 0x65757274 // 'true'
    _IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')
)
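// Explanatory note (added, not from the original sources): each immediate
// packs the four ASCII bytes of its literal in little-endian order, e.g.
// 'n' | 'u'<<8 | 'l'<<16 | 'l'<<24 == 0x6c6c756e, so a single 32-bit CMPL
// against the input buffer compares the whole keyword (or the "alse" tail of
// "false") at once.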

const (
    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)
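// Explanatory note (added): _BM_space sets one bit per ASCII code of the four
// JSON whitespace characters; all of them are below 64, so a candidate byte
// can be classified with a single bit-test against this mask (presumably what
// the lspace helpers referenced later rely on).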

const (
    _MODE_JSON = 1 << 3 // base64 mode
)

const (
    _LB_error           = "_error"
    _LB_im_error        = "_im_error"
    _LB_eof_error       = "_eof_error"
    _LB_type_error      = "_type_error"
    _LB_field_error     = "_field_error"
    _LB_range_error     = "_range_error"
    _LB_stack_error     = "_stack_error"
    _LB_base64_error    = "_base64_error"
    _LB_unquote_error   = "_unquote_error"
    _LB_parsing_error   = "_parsing_error"
    _LB_parsing_error_v = "_parsing_error_v"
    _LB_mismatch_error  = "_mismatch_error"
)

const (
    _LB_char_0_error  = "_char_0_error"
    _LB_char_1_error  = "_char_1_error"
    _LB_char_2_error  = "_char_2_error"
    _LB_char_3_error  = "_char_3_error"
    _LB_char_4_error  = "_char_4_error"
    _LB_char_m2_error = "_char_m2_error"
    _LB_char_m3_error = "_char_m3_error"
)

const (
    _LB_skip_one       = "_skip_one"
    _LB_skip_key_value = "_skip_key_value"
)

var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
    _X0 = jit.Reg("X0")
    _X1 = jit.Reg("X1")
)

var (
    _ST = jit.Reg("BX")
    _IP = jit.Reg("R12")
    _IL = jit.Reg("R13")
    _IC = jit.Reg("R14")
    _VP = jit.Reg("R15")
)

var (
    _R10 = jit.Reg("R10") // used for gcWriteBarrier
    _DF  = jit.Reg("R10") // reuse R10 in generic decoder for flags
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)

var (
    _ARG_s  = _ARG_sp
    _ARG_sp = jit.Ptr(_SP, _FP_base)
    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
)

var (
    _VAR_sv   = _VAR_sv_p
    _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48)
    _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56)
    _VAR_vk   = jit.Ptr(_SP, _FP_base + 64)
)

var (
    _RET_rc = jit.Ptr(_SP, _FP_base + 72)
    _RET_et = jit.Ptr(_SP, _FP_base + 80)
    _RET_ep = jit.Ptr(_SP, _FP_base + 88)
)

var (
    _VAR_st = _VAR_st_Vt
    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
)

var (
    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
)

var (
    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
)

var (
    _VAR_bs_p  = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
    _VAR_bs_n  = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
)

var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)

var (
    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save the mismatched type
    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save the mismatched position
    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save the skip return pc
)
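// Explanatory note (added): _VAR_et, _VAR_ic and _VAR_pc back the deferred
// type-mismatch path. _asm_OP_dismatch_err and _asm_OP_go_skip record the
// expected type, the input position and a resume address here, jump into the
// shared _skip_one / _skip_key_value stubs to consume the offending value, and
// then resume via the saved pc; the epilogue turns a non-zero _VAR_et into a
// MismatchTypeError only after the rest of the value has been decoded.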

type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

/** Assembler Interface **/

func (self *_Assembler) Load() _Decoder {
    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.copy_string()
    self.escape_string()
    self.escape_string_twice()
    self.skip_one()
    self.skip_key_value()
    self.mismatch_error()
    self.type_error()
    self.field_error()
    self.range_error()
    self.stack_error()
    self.base64_error()
    self.parsing_error()
}
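
// Usage sketch (added for illustration, not part of the original file): a
// compiled _Program is typically turned into a callable decoder with
//
//     dec := newAssembler(prog).Load()
//
// where prog is assumed to come from this package's instruction compiler and
// dec is the resulting _Decoder function value.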

/** Assembler Stages **/

var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_any : (*_Assembler)._asm_OP_any,
    _OP_dyn : (*_Assembler)._asm_OP_dyn,
    _OP_str : (*_Assembler)._asm_OP_str,
    _OP_bin : (*_Assembler)._asm_OP_bin,
    _OP_bool : (*_Assembler)._asm_OP_bool,
    _OP_num : (*_Assembler)._asm_OP_num,
    _OP_i8 : (*_Assembler)._asm_OP_i8,
    _OP_i16 : (*_Assembler)._asm_OP_i16,
    _OP_i32 : (*_Assembler)._asm_OP_i32,
    _OP_i64 : (*_Assembler)._asm_OP_i64,
    _OP_u8 : (*_Assembler)._asm_OP_u8,
    _OP_u16 : (*_Assembler)._asm_OP_u16,
    _OP_u32 : (*_Assembler)._asm_OP_u32,
    _OP_u64 : (*_Assembler)._asm_OP_u64,
    _OP_f32 : (*_Assembler)._asm_OP_f32,
    _OP_f64 : (*_Assembler)._asm_OP_f64,
    _OP_unquote : (*_Assembler)._asm_OP_unquote,
    _OP_nil_1 : (*_Assembler)._asm_OP_nil_1,
    _OP_nil_2 : (*_Assembler)._asm_OP_nil_2,
    _OP_nil_3 : (*_Assembler)._asm_OP_nil_3,
    _OP_deref : (*_Assembler)._asm_OP_deref,
    _OP_index : (*_Assembler)._asm_OP_index,
    _OP_is_null : (*_Assembler)._asm_OP_is_null,
    _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,
    _OP_map_init : (*_Assembler)._asm_OP_map_init,
    _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,
    _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,
    _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,
    _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,
    _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,
    _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,
    _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,
    _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,
    _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,
    _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,
    _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,
    _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,
    _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,
    _OP_array_skip : (*_Assembler)._asm_OP_array_skip,
    _OP_array_clear : (*_Assembler)._asm_OP_array_clear,
    _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
    _OP_slice_init : (*_Assembler)._asm_OP_slice_init,
    _OP_slice_append : (*_Assembler)._asm_OP_slice_append,
    _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
    _OP_object_next : (*_Assembler)._asm_OP_object_next,
    _OP_struct_field : (*_Assembler)._asm_OP_struct_field,
    _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
    _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,
    _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,
    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
    _OP_lspace : (*_Assembler)._asm_OP_lspace,
    _OP_match_char : (*_Assembler)._asm_OP_match_char,
    _OP_check_char : (*_Assembler)._asm_OP_check_char,
    _OP_load : (*_Assembler)._asm_OP_load,
    _OP_save : (*_Assembler)._asm_OP_save,
    _OP_drop : (*_Assembler)._asm_OP_drop,
    _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse : (*_Assembler)._asm_OP_recurse,
    _OP_goto : (*_Assembler)._asm_OP_goto,
    _OP_switch : (*_Assembler)._asm_OP_switch,
    _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
    _OP_go_skip : (*_Assembler)._asm_OP_go_skip,
    _OP_add : (*_Assembler)._asm_OP_add,
    _OP_check_empty : (*_Assembler)._asm_OP_check_empty,
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _EP, _EP) // XORL EP, EP
    self.Emit("MOVQ", _VAR_et, _ET) // MOVQ VAR_et, ET
    self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_mismatch_error) // JNZ _LB_mismatch_error
    self.Link(_LB_error) // _error:
    self.Emit("MOVQ", _IC, _RET_rc) // MOVQ IC, rc<>+40(FP)
    self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+48(FP)
    self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+56(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP
    self.Emit("RET") // RET
}

func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
    self.Emit("MOVQ", _ARG_sp, _IP) // MOVQ s.p<>+0(FP), IP
    self.Emit("MOVQ", _ARG_sl, _IL) // MOVQ s.l<>+8(FP), IL
    self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
    self.Emit("MOVQ", _ARG_vp, _VP) // MOVQ vp<>+24(FP), VP
    self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ sb<>+32(FP), ST
    // initialize digital buffer first
    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
    self.Emit("MOVQ", _AX, _VAR_st_Db) // MOVQ AX, ss.Dbuf
    self.Emit("XORL", _AX, _AX) // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_et) // MOVQ AX, VAR_et (clear the mismatch-error slot)
}

/** Function Calling Helpers **/

var _REG_go = []obj.Addr {
    _ST,
    _VP,
    _IP,
    _IL,
    _IC,
}

func (self *_Assembler) save(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
        }
    }
}

func (self *_Assembler) load(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
        }
    }
}

func (self *_Assembler) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _AX) // CALL AX
}

func (self *_Assembler) call_go(fn obj.Addr) {
    self.save(_REG_go...) // SAVE $REG_go
    self.call(fn) // CALL ${fn}
    self.load(_REG_go...) // LOAD $REG_go
}
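
// Explanatory note (added, not from the original sources): on go1.16 Go calls
// still use the stack-based ABI0 and may clobber every general-purpose
// register (and can grow the stack or trigger GC), so call_go spills the five
// state registers listed in _REG_go into the _FP_saves area before the CALL
// and reloads them afterwards. The plain call() helper is presumably reserved
// for the native subroutines, which follow a C-like convention in which these
// registers are callee-saved.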

func (self *_Assembler) call_sf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call(fn) // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}

func (self *_Assembler) call_vf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX
    self.call(fn) // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}

/** Assembler Error Handlers **/

var (
    _F_convT64 = jit.Func(convT64)
    _F_error_wrap = jit.Func(error_wrap)
    _F_error_type = jit.Func(error_type)
    _F_error_field = jit.Func(error_field)
    _F_error_value = jit.Func(error_value)
    _F_error_mismatch = jit.Func(error_mismatch)
)

var (
    _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))
    _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))
    _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))
    _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))
    _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))
    _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))
    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

var (
    _T_error = rt.UnpackType(errorType)
    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

var (
    _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
    _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)

func (self *_Assembler) type_error() {
    self.Link(_LB_type_error) // _type_error:
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0)) // MOVQ ET, (SP)
    self.call_go(_F_error_type) // CALL_GO error_type
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET) // MOVQ 8(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ 16(SP), EP
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) mismatch_error() {
    self.Link(_LB_mismatch_error) // _mismatch_error:
    self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET
    self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX
    self.Sjmp("JE" , _LB_error) // JE _LB_error
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ", _ARG_sl, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
    self.Emit("MOVQ", _VAR_ic, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.Emit("MOVQ", _VAR_et, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP)
    self.call_go(_F_error_mismatch) // CALL_GO error_mismatch
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", jit.Type(p.vt()), _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
}

func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Xref(p.vi(), 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one
}

func (self *_Assembler) skip_one() {
    self.Link(_LB_skip_one) // _skip_one:
    self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
    self.call_sf(_F_skip_one) // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9
    self.Rjmp("JMP" , _R9) // JMP (R9)
}

func (self *_Assembler) skip_key_value() {
    self.Link(_LB_skip_key_value) // _skip_key_value:
    // skip the key
    self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
    self.call_sf(_F_skip_one) // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    // match char ':'
    self.lspace("_global_1")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
    self.Sjmp("JNE" , _LB_parsing_error_v) // JNE _parsing_error_v
    self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
    self.lspace("_global_2")
    // skip the value
    self.call_sf(_F_skip_one) // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    // jump back to the specified address
    self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9
    self.Rjmp("JMP" , _R9) // JMP (R9)
}

func (self *_Assembler) field_error() {
    self.Link(_LB_field_error) // _field_error:
    self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
    self.call_go(_F_error_field) // CALL_GO error_field
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) range_error() {
    self.Link(_LB_range_error) // _range_error:
    self.slice_from(_VAR_st_Ep, 0) // SLICE st.Ep, $0
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0)) // MOVQ DI, (SP)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ ET, 16(SP)
    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP)
    self.call_go(_F_error_value) // CALL_GO error_value
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) stack_error() {
    self.Link(_LB_stack_error) // _stack_error:
    self.Emit("MOVQ", _V_stackOverflow, _EP) // MOVQ ${_V_stackOverflow}, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) base64_error() {
    self.Link(_LB_base64_error)
    self.Emit("NEGQ", _AX) // NEGQ AX
    self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.call_go(_F_convT64) // CALL_GO convT64
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP) // MOVQ 8(SP), EP
    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

func (self *_Assembler) parsing_error() {
    self.Link(_LB_eof_error) // _eof_error:
    self.Emit("MOVQ" , _IL, _IC) // MOVQ IL, IC
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP) // MOVL ${types.ERR_EOF}, EP
    self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error
    self.Link(_LB_unquote_error) // _unquote_error:
    self.Emit("SUBQ" , _VAR_sr, _SI) // SUBQ sr, SI
    self.Emit("SUBQ" , _SI, _IC) // SUBQ SI, IC
    self.Link(_LB_parsing_error_v) // _parsing_error_v:
    self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
    self.Emit("NEGQ" , _EP) // NEGQ EP
    self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error
    self.Link(_LB_char_m3_error) // _char_m3_error:
    self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
    self.Link(_LB_char_m2_error) // _char_m2_error:
    self.Emit("SUBQ" , jit.Imm(2), _IC) // SUBQ $2, IC
    self.Sjmp("JMP" , _LB_char_0_error) // JMP _char_0_error
    self.Link(_LB_im_error) // _im_error:
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPB CX, (IP)(IC)
    self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
    self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1)) // CMPB CX, 1(IP)(IC)
    self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error
    self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2)) // CMPB CX, 2(IP)(IC)
    self.Sjmp("JNE" , _LB_char_2_error) // JNE _char_2_error
    self.Sjmp("JMP" , _LB_char_3_error) // JMP _char_3_error
    self.Link(_LB_char_4_error) // _char_4_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
    self.Link(_LB_char_3_error) // _char_3_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
    self.Link(_LB_char_2_error) // _char_2_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
    self.Link(_LB_char_1_error) // _char_1_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
    self.Link(_LB_char_0_error) // _char_0_error:
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP
    self.Link(_LB_parsing_error) // _parsing_error:
    self.Emit("MOVOU", _ARG_s, _X0) // MOVOU s, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP)
    self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP)
    self.call_go(_F_error_wrap) // CALL_GO error_wrap
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Sjmp("JMP" , _LB_error) // JMP _error
}

/** Memory Management Routines **/

var (
    _T_byte = jit.Type(byteType)
    _F_mallocgc = jit.Func(mallocgc)
)

func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
    self.Emit("XORL", _AX, _AX) // XORL AX, AX
    self.Emit("MOVQ", _T_byte, _CX) // MOVQ ${type(byte)}, CX
    self.Emit("MOVQ", nb, jit.Ptr(_SP, 0)) // MOVQ ${nb}, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.call_go(_F_mallocgc) // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret}
}

func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ ${vt}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16)) // MOVB $1, 16(SP)
    self.call_go(_F_mallocgc) // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret}
}

func (self *_Assembler) vfollow(vt reflect.Type) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}
    self.valloc(vt, _AX) // VALLOC ${vt}, AX
    self.WritePtrAX(1, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
    self.Link("_end_{n}") // _end_{n}:
    self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP
}

/** Value Parsing Routines **/

var (
    _F_vstring = jit.Imm(int64(native.S_vstring))
    _F_vnumber = jit.Imm(int64(native.S_vnumber))
    _F_vsigned = jit.Imm(int64(native.S_vsigned))
    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)

func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ" , _VAR_st_Vt, _AX) // MOVQ st.Vt, AX
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX (negative st.Vt means a parsing error)
    // try to skip the value
    if vt != nil {
        self.Sjmp("JNS" , "_check_err_{n}") // JNS _check_err_{n}
        self.Emit("MOVQ", jit.Type(vt), _ET)
        self.Emit("MOVQ", _ET, _VAR_et)
        if pin2 != -1 {
            self.Emit("SUBQ", jit.Imm(1), _BP)
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9
            self.Xref(pin2, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_key_value)
        } else {
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9
            self.Sref(pin, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_one)
        }
        self.Link("_check_err_{n}")
    } else {
        self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    }
}

func (self *_Assembler) check_eof(d int64) {
    if d == 1 {
        self.Emit("CMPQ", _IC, _IL) // CMPQ IC, IL
        self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
    } else {
        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX
        self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
        self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
    }
}
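
// Explanatory note (added): check_eof(d) guards a fixed-width read by making
// sure at least d more input bytes exist; d == 1 uses the cheaper CMPQ IC, IL
// form, while larger widths (for example the 2-byte look-ahead in
// _asm_OP_unquote) first bound IC+d against IL.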

// parse_string passes the validate flag (fv) as the last argument.
func (self *_Assembler) parse_string() {
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call_vf(_F_vstring)
    self.check_err(nil, "", -1)
}

func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vnumber) // call vnumber
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vsigned)
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vunsigned)
    self.check_err(vt, pin, pin2)
}

// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) copy_string() {
    self.Link("_copy_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _AX)
    self.Emit("MOVQ", _AX, _VAR_sv_p)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _VAR_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
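
// Explanatory note (added): _copy_string, _escape_string and
// _escape_string_twice are reached by a plain JMP rather than a Go call. The
// caller puts the source pointer in DI and the length in SI, materialises its
// own resume address into R9 (the 0x4c 0x8d 0x0d byte sequence is
// "LEAQ (PC), R9", fixed up by Sref/Xref), and the helper jumps back through
// the saved R9 with the result pointer/length left in DI/SI.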

// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) escape_string() {
    self.Link("_escape_string")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX) // MALLOC SI, DX
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX
    self.Emit("XORL" , _R8, _R8) // XORL R8, R8
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv
    self.Emit("SETCC", _R8) // SETCC R8
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
    self.call(_F_unquote) // CALL unquote
    self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI
    self.Emit("ADDQ" , jit.Imm(1), _SI) // ADDQ $1, SI
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

func (self *_Assembler) escape_string_twice() {
    self.Link("_escape_string_twice")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX) // MALLOC SI, DX
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX
    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv
    self.Emit("XORL" , _AX, _AX) // XORL AX, AX
    self.Emit("SETCC", _AX) // SETCC AX
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ ${types.B_UNICODE_REPLACE}, AX
    self.Emit("ORQ" , _AX, _R8) // ORQ AX, R8
    self.call(_F_unquote) // CALL unquote
    self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI
    self.Emit("ADDQ" , jit.Imm(3), _SI) // ADDQ $3, SI
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

/** Range Checking Routines **/

var (
    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

var (
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {
    *_Vp_max_f32 = math.MaxFloat32
    *_Vp_min_f32 = -math.MaxFloat32
}

func (self *_Assembler) range_single() {
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0) // CVTSD2SS st.Dv, X0
    self.Emit("MOVQ" , _V_max_f32, _AX) // MOVQ _max_f32, AX
    self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0
    self.Sjmp("JA" , _LB_range_error) // JA _range_error
    self.Emit("MOVQ" , _V_min_f32, _AX) // MOVQ _min_f32, AX
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0
    self.Sjmp("JB" , _LB_range_error) // JB _range_error
}

func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
    self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
    self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET
    self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP
    self.Emit("CMPQ", _AX, jit.Imm(a)) // CMPQ AX, ${a}
    self.Sjmp("JL" , _LB_range_error) // JL _range_error
    self.Emit("CMPQ", _AX, jit.Imm(b)) // CMPQ AX, ${b}
    self.Sjmp("JG" , _LB_range_error) // JG _range_error
}

func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
    self.Emit("MOVQ" , _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
    self.Emit("MOVQ" , jit.Gitab(i), _ET) // MOVQ ${i}, ET
    self.Emit("MOVQ" , jit.Gtype(t), _EP) // MOVQ ${t}, EP
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_range_error) // JS _range_error
    self.Emit("CMPQ" , _AX, jit.Imm(int64(v))) // CMPQ AX, ${v}
    self.Sjmp("JA" , _LB_range_error) // JA _range_error
}

/** String Manipulating Routines **/

var (
    _F_unquote = jit.Imm(int64(native.S_unquote))
)

func (self *_Assembler) slice_from(p obj.Addr, d int64) {
    self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI
    self.slice_from_r(_SI, d) // SLICE_R SI, ${d}
}

func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI
    self.Emit("NEGQ", p) // NEGQ ${p}
    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI
}
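
// Worked example (added for clarity): slice_from_r(p, d) computes DI = IP + p
// and, after the NEGQ, SI = IC - p + d, i.e. the number of bytes consumed
// since offset p plus the adjustment d. So once vstring has left the index
// just past the opening quote in st.Iv, slice_from(_VAR_st_Iv, -1) yields DI
// pointing at the string body and SI holding its length without the closing
// quote.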

func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
    self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1
    self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n}
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_unquote_once_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string")
    self.Link("_noescape_{n}") // _noescape_{n}:
    if copy {
        self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
        self.Sjmp("JNC", "_unquote_once_write_{n}")
        self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
        self.Sref("_unquote_once_write_{n}", 4)
        self.Sjmp("JMP", "_copy_string")
    }
    self.Link("_unquote_once_write_{n}")
    self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(10, _DI, p, false, false)
    }
}

func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1
    self.Sjmp("JE" , _LB_eof_error) // JE _eof_error
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\'
    self.Sjmp("JNE" , _LB_char_m3_error) // JNE _char_m3_error
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"')) // CMPB -2(IP)(IC), $'"'
    self.Sjmp("JNE" , _LB_char_m2_error) // JNE _char_m2_error
    self.slice_from(_VAR_st_Iv, -3) // SLICE st.Iv, $-3
    self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX
    self.Emit("ADDQ" , _VAR_st_Iv, _AX) // ADDQ st.Iv, AX
    self.Emit("CMPQ" , _VAR_st_Ep, _AX) // CMPQ st.Ep, AX
    self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n}
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string_twice")
    self.Link("_noescape_{n}") // _noescape_{n}:
    self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_unquote_twice_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_unquote_twice_write_{n}")
    self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(12, _DI, p, false, false)
    }
}

/** Memory Clearing Routines **/

var (
    _F_memclrHasPointers = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)

func (self *_Assembler) mem_clear_fn(ptrfree bool) {
    if !ptrfree {
        self.call_go(_F_memclrHasPointers)
    } else {
        self.call_go(_F_memclrNoHeapPointers)
    }
}

func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _CX) // MOVQ ${size}, CX
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ (ST)(AX), AX
    self.Emit("SUBQ", _VP, _AX) // SUBQ VP, AX
    self.Emit("ADDQ", _AX, _CX) // ADDQ AX, CX
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
    self.mem_clear_fn(ptrfree) // CALL_GO memclr{Has,NoHeap}Pointers
}

/** Map Assigning Routines **/

var (
    _F_mapassign = jit.Func(mapassign)
    _F_mapassign_fast32 = jit.Func(mapassign_fast32)
    _F_mapassign_faststr = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)

func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}

func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
        self.vfollow(t.Elem())
    }
}

func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX) // LEAQ ${v}, AX
    self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign
}

func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ ${t}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
    self.Emit("MOVQ", p, jit.Ptr(_SP, 16)) // MOVQ ${p}, 16(SP)
    self.Emit("MOVQ", n, jit.Ptr(_SP, 24)) // MOVQ ${n}, 24(SP)
    self.call_go(_F_mapassign_faststr) // CALL_GO ${fn}
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ 32(SP), VP
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _SI) // MOVQ ${t}, SI
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.call_go(fn) // CALL_GO ${fn}
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ 24(SP), VP
}

func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call(t, fn)
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    /* deref pointer if needed */
    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    /* addressable value with pointer receiver */
    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    /* allocate the key, and call the unmarshaler */
    self.valloc(vk, _DI) // VALLOC ${vk}, DI
    // must spill the vk pointer, since the next call_go may trigger GC
    self.Emit("MOVQ" , _DI, _VAR_vk)
    self.Emit("MOVQ" , jit.Type(tk), _AX) // MOVQ ${tk}, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8)) // MOVQ DI, 8(SP)
    self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
    self.call_go(_F_decodeTextUnmarshaler) // CALL_GO decodeTextUnmarshaler
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
    self.Sjmp("JNZ" , _LB_error) // JNZ _error
    self.Emit("MOVQ" , _VAR_vk, _AX)

    /* select the correct assignment function */
    if !pv {
        self.mapassign_call(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}

/** External Unmarshaler Routines **/

var (
    _F_skip_one = jit.Imm(int64(native.S_skip_one))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)

func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one) // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    self.slice_from_r(_AX, 0) // SLICE_R AX, $0
    self.Emit("MOVQ" , _DI, _VAR_sv_p) // MOVQ DI, sv.p
    self.Emit("MOVQ" , _SI, _VAR_sv_n) // MOVQ SI, sv.n
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
}

func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string() // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
}

func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    /* allocate the field if needed */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ" , _VP, _AX) // MOVQ VP, AX
        self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX) // MOVQ (AX), AX
        self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
        self.Sjmp("JNZ" , "_deref_{n}") // JNZ _deref_{n}
        self.valloc(t.Elem(), _AX) // VALLOC ${t.Elem()}, AX
        self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
        self.Link("_deref_{n}") // _deref_{n}:
    }

    /* set value type */
    self.Emit("MOVQ", jit.Type(pt), _CX) // MOVQ ${pt}, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP)

    /* set value pointer */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
    } else {
        self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
    }

    /* set the source string and call the unmarshaler */
    self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
    self.call_go(fn) // CALL_GO ${fn}
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
    self.Sjmp("JNZ" , _LB_error) // JNZ _error
}

/** Dynamic Decoding Routine **/

var (
    _F_decodeTypedPointer obj.Addr
)

func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}

func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX
    self.Emit("MOVOU", _ARG_sp, _X0) // MOVOU sp, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP)
    self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24)) // MOVQ ${vt}, 24(SP)
    self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32)) // MOVQ ${vp}, 32(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40)) // MOVQ ST, 40(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48)) // MOVQ CX, 48(SP)
    self.call_go(_F_decodeTypedPointer) // CALL_GO decodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET) // MOVQ 64(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP) // MOVQ 72(SP), EP
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC) // MOVQ 56(SP), IC
    self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
    self.Sjmp("JE", "_decode_dynamic_end_{n}") // JE _decode_dynamic_end_{n}
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX
    self.Sjmp("JNE" , _LB_error) // JNE _LB_error
    self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic
    self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et
    self.Link("_decode_dynamic_end_{n}")
}
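
// Explanatory note (added): when the nested decodeTypedPointer call reports a
// MismatchTypeError, decode_dynamic does not abort. It stashes the error type
// in _VAR_et and the offending position in _VAR_ic and lets decoding continue;
// the epilogue later turns the stashed value into the returned error through
// _LB_mismatch_error. Any other error type jumps straight to _LB_error.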
1145
1146/** OpCode Assembler Functions **/
1147
1148var (
1149_F_memequal = jit.Func(memequal)
1150_F_memmove = jit.Func(memmove)
1151_F_growslice = jit.Func(growslice)
1152_F_makeslice = jit.Func(makeslice)
1153_F_makemap_small = jit.Func(makemap_small)
1154_F_mapassign_fast64 = jit.Func(mapassign_fast64)
1155)
1156
1157var (
1158_F_lspace = jit.Imm(int64(native.S_lspace))
1159_F_strhash = jit.Imm(int64(caching.S_strhash))
1160)
1161
1162var (
1163_F_b64decode = jit.Imm(int64(_subr__b64decode))
1164_F_decodeValue = jit.Imm(int64(_subr_decode_value))
1165)
1166
1167var (
1168_F_skip_array = jit.Imm(int64(native.S_skip_array))
1169_F_skip_object = jit.Imm(int64(native.S_skip_object))
1170)
1171
1172var (
1173_F_FieldMap_GetCaseInsensitive obj.Addr
1174_Empty_Slice = make([]byte, 0)
1175_Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
1176)
1177
1178const (
1179_MODE_AVX2 = 1 << 2
1180)
1181
1182const (
1183_Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
1184_Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
1185_Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
1186)
1187
1188const (
1189_Vk_Ptr = int64(reflect.Ptr)
1190_Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
1191)
1192
1193func init() {
1194_F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
1195}
1196
1197func (self *_Assembler) _asm_OP_any(_ *_Instr) {
1198self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX) // MOVQ 8(VP), CX
1199self.Emit("TESTQ" , _CX, _CX) // TESTQ CX, CX
1200self.Sjmp("JZ" , "_decode_{n}") // JZ _decode_{n}
1201self.Emit("CMPQ" , _CX, _VP) // CMPQ CX, VP
1202self.Sjmp("JE" , "_decode_{n}") // JE _decode_{n}
1203self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
1204self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
1205self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX
1206self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr}
1207self.Sjmp("JNE" , "_decode_{n}") // JNE _decode_{n}
1208self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI
1209self.decode_dynamic(_AX, _DI) // DECODE AX, DI
1210self.Sjmp("JMP" , "_decode_end_{n}") // JMP _decode_end_{n}
1211self.Link("_decode_{n}") // _decode_{n}:
1212self.Emit("MOVQ" , _ARG_fv, _DF) // MOVQ fv, DF
1213self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0)) // MOVQ _ST, (SP)
1214self.call(_F_decodeValue) // CALL decodeValue
1215self.Emit("TESTQ" , _EP, _EP) // TESTQ EP, EP
1216self.Sjmp("JNZ" , _LB_parsing_error) // JNZ _parsing_error
1217self.Link("_decode_end_{n}") // _decode_end_{n}:
1218}
1219
1220func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
1221self.Emit("MOVQ" , jit.Type(p.vt()), _ET) // MOVQ ${p.vt()}, ET
1222self.Emit("CMPQ" , jit.Ptr(_VP, 8), jit.Imm(0)) // CMPQ 8(VP), $0
1223self.Sjmp("JE" , _LB_type_error) // JE _type_error
1224self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
1225self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX
1226self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
1227self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX
1228self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr}
1229self.Sjmp("JNE" , _LB_type_error) // JNE _type_error
1230self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI
1231self.decode_dynamic(_AX, _DI) // DECODE AX, DI
1232self.Link("_decode_end_{n}") // _decode_end_{n}:
1233}
1234
1235func (self *_Assembler) _asm_OP_str(_ *_Instr) {
1236self.parse_string() // PARSE STRING
1237self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP)
1238}
1239
1240func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
1241self.parse_string() // PARSE STRING
1242self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1
1243self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0)) // MOVQ DI, (VP)
1244self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
1245self.Emit("SHRQ" , jit.Imm(2), _SI) // SHRQ $2, SI
1246self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ (SI)(SI*2), SI
1247self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
1248self.malloc(_SI, _SI) // MALLOC SI, SI
1249
1250// TODO: due to base64x's bug, only use AVX mode now
1251self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) // MOVL $_MODE_JSON, CX
1252
1253/* call the decoder */
1254self.Emit("XORL" , _DX, _DX) // XORL DX, DX
1255self.Emit("MOVQ" , _VP, _DI) // MOVQ VP, DI
1256
1257self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9) // MOVQ SI, (VP)
1258self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP)
1259self.Emit("MOVQ" , _R9, _SI)
1260
1261self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP)
1262self.call(_F_b64decode) // CALL b64decode
1263self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
1264self.Sjmp("JS" , _LB_base64_error) // JS _base64_error
1265self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
1266}
1267
1268func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
1269self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
1270self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
1271self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
1272self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f'
1273self.Sjmp("JE" , "_false_{n}") // JE _false_{n}
1274self.Emit("MOVL", jit.Imm(_IM_true), _CX) // MOVL $"true", CX
1275self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
1276self.Sjmp("JE" , "_bool_true_{n}")
1277
1278// try to skip the value
1279self.Emit("MOVQ", _IC, _VAR_ic)
1280self.Emit("MOVQ", _T_bool, _ET)
1281self.Emit("MOVQ", _ET, _VAR_et)
1282self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
1283self.Sref("_end_{n}", 4)
1284self.Emit("MOVQ", _R9, _VAR_pc)
1285self.Sjmp("JMP" , _LB_skip_one)
1286
1287self.Link("_bool_true_{n}")
1288self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
1289self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0)) // MOVB $1, (VP)
1290self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
1291self.Link("_false_{n}") // _false_{n}:
1292self.Emit("ADDQ", jit.Imm(1), _AX) // ADDQ $1, AX
1293self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
1294self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
1295self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
1296self.Emit("MOVL", jit.Imm(_IM_alse), _CX) // MOVL $"alse", CX
1297self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
1298self.Sjmp("JNE" , _LB_im_error) // JNE _im_error
1299self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
1300self.Emit("XORL", _AX, _AX) // XORL AX, AX
1301self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
1302self.Link("_end_{n}") // _end_{n}:
1303}
1304
1305func (self *_Assembler) _asm_OP_num(_ *_Instr) {
1306self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
1307self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
1308self.Emit("MOVQ", _IC, _BP)
1309self.Sjmp("JNE", "_skip_number_{n}")
1310self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
1311self.Emit("ADDQ", jit.Imm(1), _IC)
1312self.Link("_skip_number_{n}")
1313
1314/* call skip_number */
1315self.call_sf(_F_skip_number) // CALL_SF skip_one
1316self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
1317self.Sjmp("JNS" , "_num_next_{n}")
1318
1319/* call skip one */
1320self.Emit("MOVQ", _BP, _VAR_ic)
1321self.Emit("MOVQ", _T_number, _ET)
1322self.Emit("MOVQ", _ET, _VAR_et)
1323self.Byte(0x4c, 0x8d, 0x0d)
1324self.Sref("_num_end_{n}", 4)
1325self.Emit("MOVQ", _R9, _VAR_pc)
1326self.Sjmp("JMP" , _LB_skip_one)
1327
1328/* assgin string */
1329self.Link("_num_next_{n}")
1330self.slice_from_r(_AX, 0)
1331self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
1332self.Sjmp("JNC", "_num_write_{n}")
1333self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
1334self.Sref("_num_write_{n}", 4)
1335self.Sjmp("JMP", "_copy_string")
1336self.Link("_num_write_{n}")
1337self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
1338self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
1339
1340/* check if quoted */
1341self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
1342self.Sjmp("JNE", "_num_end_{n}")
1343self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
1344self.Sjmp("JNE", _LB_char_0_error)
1345self.Emit("ADDQ", jit.Imm(1), _IC)
1346self.Link("_num_end_{n}")
1347}
1348
1349func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
1350var pin = "_i8_end_{n}"
1351self.parse_signed(int8Type, pin, -1) // PARSE int8
1352self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
1353self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
1354self.Link(pin)
1355}
1356
1357func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
1358var pin = "_i16_end_{n}"
1359self.parse_signed(int16Type, pin, -1) // PARSE int16
1360self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
1361self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
1362self.Link(pin)
1363}
1364
1365func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
1366var pin = "_i32_end_{n}"
1367self.parse_signed(int32Type, pin, -1) // PARSE int32
1368self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
1369self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
1370self.Link(pin)
1371}
1372
1373func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
1374var pin = "_i64_end_{n}"
1375self.parse_signed(int64Type, pin, -1) // PARSE int64
1376self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
1377self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
1378self.Link(pin)
1379}
1380
1381func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
1382var pin = "_u8_end_{n}"
1383self.parse_unsigned(uint8Type, pin, -1) // PARSE uint8
1384self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
1385self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
1386self.Link(pin)
1387}
1388
1389func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
1390var pin = "_u16_end_{n}"
1391self.parse_unsigned(uint16Type, pin, -1) // PARSE uint16
1392self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
1393self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
1394self.Link(pin)
1395}
1396
1397func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
1398var pin = "_u32_end_{n}"
1399self.parse_unsigned(uint32Type, pin, -1) // PARSE uint32
1400self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
1401self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
1402self.Link(pin)
1403}
1404
1405func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
1406var pin = "_u64_end_{n}"
1407self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64
1408self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
1409self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
1410self.Link(pin)
1411}
1412
1413func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
1414var pin = "_f32_end_{n}"
1415self.parse_number(float32Type, pin, -1) // PARSE NUMBER
1416self.range_single() // RANGE float32
1417self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP)
1418self.Link(pin)
1419}
1420
1421func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
1422var pin = "_f64_end_{n}"
1423self.parse_number(float64Type, pin, -1) // PARSE NUMBER
1424self.Emit("MOVSD", _VAR_st_Dv, _X0) // MOVSD st.Dv, X0
1425self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP)
1426self.Link(pin)
1427}
1428
1429func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
1430self.check_eof(2)
1431self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\')) // CMPB (IP)(IC), $'\\'
1432self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
1433self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"')) // CMPB 1(IP)(IC), $'"'
1434self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error
1435self.Emit("ADDQ", jit.Imm(2), _IC) // ADDQ $2, IC
1436self.parse_string() // PARSE STRING
1437self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
1438}
1439
1440func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
1441self.Emit("XORL", _AX, _AX) // XORL AX, AX
1442self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
1443}
1444
1445func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
1446self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
1447self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
1448}
1449
1450func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
1451self.Emit("XORL" , _AX, _AX) // XORL AX, AX
1452self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
1453self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
1454self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16)) // MOVOU X0, 16(VP)
1455}
1456
1457func (self *_Assembler) _asm_OP_deref(p *_Instr) {
1458self.vfollow(p.vt())
1459}
1460
1461func (self *_Assembler) _asm_OP_index(p *_Instr) {
1462self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX
1463self.Emit("ADDQ", _AX, _VP) // ADDQ _AX, _VP
1464}
1465
1466func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
1467self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
1468self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
1469self.Sjmp("JA" , "_not_null_{n}") // JA _not_null_{n}
1470self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
1471self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
1472self.Xjmp("JE" , p.vi()) // JE {p.vi()}
1473self.Link("_not_null_{n}") // _not_null_{n}:
1474}
1475
1476func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
1477self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX) // LEAQ 4(IC), AX
1478self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
1479self.Sjmp("JA" , "_not_null_quote_{n}") // JA _not_null_quote_{n}
1480self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
1481self.Sjmp("JNE" , "_not_null_quote_{n}") // JNE _not_null_quote_{n}
1482self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"')) // CMPB 4(IP)(IC), $'"'
1483self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
1484self.Xjmp("JE" , p.vi()) // JE {p.vi()}
1485self.Link("_not_null_quote_{n}") // _not_null_quote_{n}:
1486}
1487
1488func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
1489self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
1490self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
1491self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}
1492self.call_go(_F_makemap_small) // CALL_GO makemap_small
1493self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
1494self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
1495self.Link("_end_{n}") // _end_{n}:
1496self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP
1497}
1498
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi())    // PARSE int8
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)    // RANGE int8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)    // MAPASSIGN int8, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi())    // PARSE int16
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)    // RANGE int16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)    // MAPASSIGN int16, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi())    // PARSE int32
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)    // RANGE int32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)    // MAPASSIGN int32, mapassign, st.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)    // MAPASSIGN int32, mapassign_fast32
    }
}

func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi())    // PARSE int64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)    // MAPASSIGN int64, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)    // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)    // MAPASSIGN int64, mapassign_fast64
    }
}

func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi())    // PARSE uint8
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)    // RANGE uint8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)    // MAPASSIGN uint8, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi())    // PARSE uint16
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)    // RANGE uint16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)    // MAPASSIGN uint16, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi())    // PARSE uint32
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)    // RANGE uint32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)    // MAPASSIGN uint32, st.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)    // MAPASSIGN uint32, mapassign_fast32
    }
}

func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi())    // PARSE uint64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)    // MAPASSIGN uint64, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)    // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)    // MAPASSIGN uint64, mapassign_fast64
    }
}

func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi())    // PARSE NUMBER
    self.range_single()    // RANGE float32
    self.Emit("MOVSS", _X0, _VAR_st_Dv)    // MOVSS X0, st.Dv
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)    // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}

func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi())    // PARSE NUMBER
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)    // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}

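// _asm_OP_map_key_str decodes a string key. With mapfast(vt) the unquoted
// (sv.p, sv.n) pair is handed directly to the faststr assign helper; otherwise a
// temporary key of the map's key type is allocated, the string header is copied
// into it, and the generic mapassign path is used. Sketch (illustration only):
//
//     s := unquote(buf, &ic)                 // -> sv.p, sv.n
//     if mapfast(vt) {
//         slot = mapassign_faststr(t, m, s)
//     } else {
//         k := new(K); *k = s
//         slot = mapassign(t, m, k)
//     }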
func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string()    // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)    // UNQUOTE once, sv.p, sv.n
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)
        self.Emit("MOVOU", _VAR_sv, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))
    } else {
        self.Emit("MOVQ", _VAR_sv_p, _DI)    // MOVQ sv.p, DI
        self.Emit("MOVQ", _VAR_sv_n, _SI)    // MOVQ sv.n, SI
        self.mapassign_str_fast(vt, _DI, _SI)    // MAPASSIGN string, DI, SI
    }
}

func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string()    // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)    // UNQUOTE once, sv.p, sv.n
    self.mapassign_utext(p.vt(), false)    // MAPASSIGN utext, ${p.vt()}, false
}

func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string()    // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)    // UNQUOTE once, sv.p, sv.n
    self.mapassign_utext(p.vt(), true)    // MAPASSIGN utext, ${p.vt()}, true
}

func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array)    // CALL_SF skip_array
    self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v)    // JS _parsing_error_v
}

func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}

func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}

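// _asm_OP_slice_init resets the slice length and allocates backing storage the
// first time through. Roughly, over the slice header at vp (a sketch only):
//
//     hdr := (*sliceHeader)(vp)
//     hdr.Len = 0
//     if hdr.Cap == 0 {
//         hdr.Cap  = _MinSlice
//         hdr.Data = makeslice(et, 0, _MinSlice)
//     }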
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX)    // XORL AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ AX, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)    // MOVQ 16(VP), AX
    self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JNZ" , "_done_{n}")    // JNZ _done_{n}
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)    // MOVQ ${_MinSlice}, CX
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))    // MOVQ CX, 16(VP)
    self.Emit("MOVQ" , jit.Type(p.vt()), _DX)    // MOVQ ${p.vt()}, DX
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0))    // MOVQ DX, (SP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))    // MOVQ CX, 16(SP)
    self.call_go(_F_makeslice)    // CALL_GO makeslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)    // MOVQ 24(SP), AX
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)    // MOVQ AX, (VP)
    self.Link("_done_{n}")    // _done_{n}:
    self.Emit("XORL" , _AX, _AX)    // XORL AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ AX, 8(VP)
}

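// _asm_OP_check_empty peeks one byte ahead for the matching ']' so that an empty
// array can bypass the element-decoding loop entirely. Sketch (illustration only;
// _Zero_Base is taken to be the shared base address used for zero-length slices):
//
//     if buf[ic] == ']' {
//         ic++
//         *(*sliceHeader)(vp) = sliceHeader{Data: _Zero_Base, Len: 0, Cap: 0}
//         goto p.vi()    // skip the array body
//     }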
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)    // LEAQ 1(IC), AX
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))    // CMPB (IP)(IC), ']'
        self.Sjmp("JNE" , "_not_empty_array_{n}")    // JNE _not_empty_array_{n}
        self.Emit("MOVQ", _AX, _IC)    // MOVQ AX, IC
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR" , _X0, _X0)    // PXOR X0, X0
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))    // MOVOU X0, 8(VP)
        self.Xjmp("JMP" , p.vi())    // JMP {p.vi()}
        self.Link("_not_empty_array_{n}")
    } else {
        panic("only implement check empty array here!")
    }
}

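// _asm_OP_slice_append makes room for one more element, growing the backing array
// through growslice once length reaches capacity, then points vp at the new slot.
// Rough shape of the emitted logic (not the generated code):
//
//     if hdr.Len >= hdr.Cap {
//         hdr = growslice(et, hdr, hdr.Cap*2)
//         if et has no pointers {
//             memclr(&data[hdr.Len], (hdr.Cap-hdr.Len)*et.Size)    // see the comment inside
//         }
//     }
//     hdr.Len++
//     vp = &data[hdr.Len-1]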
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)    // MOVQ 8(VP), AX
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))    // CMPQ AX, 16(VP)
    self.Sjmp("JB" , "_index_{n}")    // JB _index_{n}
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)    // MOVQ ${p.vt()}, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0)    // MOVOU (VP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))    // MOVOU X0, 8(SP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)    // MOVQ 16(VP), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24))    // MOVQ AX, 24(SP)
    self.Emit("SHLQ" , jit.Imm(1), _AX)    // SHLQ $1, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))    // MOVQ AX, 32(SP)
    self.call_go(_F_growslice)    // CALL_GO growslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI)    // MOVQ 40(SP), DI
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX)    // MOVQ 48(SP), AX
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI)    // MOVQ 56(SP), SI
    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)    // MOVQ DI, (VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ AX, 8(VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))    // MOVQ SI, 16(VP)

    // growslice does not zero the newly grown region when the element type has no
    // pointer data, so we zero it here to avoid decoding on top of garbage values.
    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("SUBQ" , _AX, _SI)    // SUBQ AX, SI

        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))    // ADDQ $1, 8(VP)
        self.Emit("MOVQ" , _DI, _VP)    // MOVQ DI, VP
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)    // MOVQ ${p.vlen()}, CX
        self.From("MULQ" , _CX)    // MULQ CX
        self.Emit("ADDQ" , _AX, _VP)    // ADDQ AX, VP

        self.Emit("MOVQ" , _SI, _AX)    // MOVQ SI, AX
        self.From("MULQ" , _CX)    // MULQ CX
        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)

        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0))    // MOVQ VP, (SP)
        self.mem_clear_fn(true)    // CALL_GO memclr{Has,NoHeap}
        self.Sjmp("JMP", "_append_slice_end_{n}")    // JMP _append_slice_end_{n}
    }

    self.Link("_index_{n}")    // _index_{n}:
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))    // ADDQ $1, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)    // MOVQ (VP), VP
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)    // MOVQ ${p.vlen()}, CX
    self.From("MULQ" , _CX)    // MULQ CX
    self.Emit("ADDQ" , _AX, _VP)    // ADDQ AX, VP
    self.Link("_append_slice_end_{n}")
}

func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object)    // CALL_SF skip_object
    self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v)    // JS _parsing_error_v
}

func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one)    // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v)    // JS _parsing_error_v
}

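// _asm_OP_struct_field resolves a field name against the pre-built FieldMap: hash
// the name with strhash, linearly probe the 32-byte entries, confirm a candidate
// with memequal, then fall back to a case-insensitive lookup. The resulting field
// index (or -1 for an unknown field) is left in sr for the following OP_switch.
// Illustrative pseudo-Go only:
//
//     h := strhash(name)
//     for i := h % fm.N; ; i = (i + 1) % fm.N {    // linear probing
//         e := &fm.b[i]
//         if e.Hash == 0 { break }                 // empty slot: not in the table
//         if e.Hash == h && e.Name == name { sr = e.ID; goto end }
//     }
//     if sr = fm.GetCaseInsensitive(name); sr < 0 && disableUnknown {
//         return fieldError
//     }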
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX)    // MOVQ $-1, AX
    self.Emit("MOVQ" , _AX, _VAR_sr)    // MOVQ AX, sr
    self.parse_string()    // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)    // UNQUOTE once, sv.p, sv.n
    self.Emit("LEAQ" , _VAR_sv, _AX)    // LEAQ sv, AX
    self.Emit("XORL" , _CX, _CX)    // XORL CX, CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))    // MOVQ CX, 8(SP)
    self.call_go(_F_strhash)    // CALL_GO strhash
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX)    // MOVQ 16(SP), AX
    self.Emit("MOVQ" , _AX, _R9)    // MOVQ AX, R9
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)    // MOVQ ${p.vf()}, CX
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)    // MOVQ FieldMap.b(CX), SI
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)    // MOVQ FieldMap.N(CX), CX
    self.Emit("TESTQ", _CX, _CX)    // TESTQ CX, CX
    self.Sjmp("JZ" , "_try_lowercase_{n}")    // JZ _try_lowercase_{n}
    self.Link("_loop_{n}")    // _loop_{n}:
    self.Emit("XORL" , _DX, _DX)    // XORL DX, DX
    self.From("DIVQ" , _CX)    // DIVQ CX
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)    // LEAQ 1(DX), AX
    self.Emit("SHLQ" , jit.Imm(5), _DX)    // SHLQ $5, DX
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)    // LEAQ (SI)(DX), DI
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)    // MOVQ FieldEntry.Hash(DI), R8
    self.Emit("TESTQ", _R8, _R8)    // TESTQ R8, R8
    self.Sjmp("JZ" , "_try_lowercase_{n}")    // JZ _try_lowercase_{n}
    self.Emit("CMPQ" , _R8, _R9)    // CMPQ R8, R9
    self.Sjmp("JNE" , "_loop_{n}")    // JNE _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)    // MOVQ FieldEntry.Name+8(DI), DX
    self.Emit("CMPQ" , _DX, _VAR_sv_n)    // CMPQ DX, sv.n
    self.Sjmp("JNE" , "_loop_{n}")    // JNE _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)    // MOVQ FieldEntry.ID(DI), R8
    self.Emit("MOVQ" , _AX, _VAR_ss_AX)    // MOVQ AX, ss.AX
    self.Emit("MOVQ" , _CX, _VAR_ss_CX)    // MOVQ CX, ss.CX
    self.Emit("MOVQ" , _SI, _VAR_ss_SI)    // MOVQ SI, ss.SI
    self.Emit("MOVQ" , _R8, _VAR_ss_R8)    // MOVQ R8, ss.R8
    self.Emit("MOVQ" , _R9, _VAR_ss_R9)    // MOVQ R9, ss.R9
    self.Emit("MOVQ" , _VAR_sv_p, _AX)    // MOVQ _VAR_sv_p, AX
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)    // MOVQ FieldEntry.Name(DI), CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))    // MOVQ CX, 8(SP)
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16))    // MOVQ DX, 16(SP)
    self.call_go(_F_memequal)    // CALL_GO memequal
    self.Emit("MOVQ" , _VAR_ss_AX, _AX)    // MOVQ ss.AX, AX
    self.Emit("MOVQ" , _VAR_ss_CX, _CX)    // MOVQ ss.CX, CX
    self.Emit("MOVQ" , _VAR_ss_SI, _SI)    // MOVQ ss.SI, SI
    self.Emit("MOVQ" , _VAR_ss_R9, _R9)    // MOVQ ss.R9, R9
    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX)    // MOVB 24(SP), DX
    self.Emit("TESTB", _DX, _DX)    // TESTB DX, DX
    self.Sjmp("JZ" , "_loop_{n}")    // JZ _loop_{n}
    self.Emit("MOVQ" , _VAR_ss_R8, _R8)    // MOVQ ss.R8, R8
    self.Emit("MOVQ" , _R8, _VAR_sr)    // MOVQ R8, sr
    self.Sjmp("JMP" , "_end_{n}")    // JMP _end_{n}
    self.Link("_try_lowercase_{n}")    // _try_lowercase_{n}:
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)    // MOVQ ${p.vf()}, AX
    self.Emit("MOVOU", _VAR_sv, _X0)    // MOVOU sv, X0
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ AX, (SP)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))    // MOVOU X0, 8(SP)
    self.call_go(_F_FieldMap_GetCaseInsensitive)    // CALL_GO FieldMap::GetCaseInsensitive
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)    // MOVQ 24(SP), AX
    self.Emit("MOVQ" , _AX, _VAR_sr)    // MOVQ AX, _VAR_sr
    self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JNS" , "_end_{n}")    // JNS _end_{n}
    self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv)    // BTQ ${_F_disable_unknown}, fv
    self.Sjmp("JC" , _LB_field_error)    // JC _field_error
    self.Link("_end_{n}")    // _end_{n}:
}

func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
    self.unmarshal_json(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
    self.unmarshal_json(p.vt(), false)
}

func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
    self.unmarshal_text(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
    self.unmarshal_text(p.vt(), false)
}

func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
    self.lspace("_{n}")
}

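// lspace skips insignificant whitespace. Up to four bytes are tested inline against
// the _BM_space bitmask (one bit each for ' ', '\t', '\r' and '\n'); anything longer
// is handed to the native lspace scanner. Rough sketch:
//
//     check buf[ic]; if it is not whitespace, done
//     repeat up to 3 more times: ic++, then check buf[ic]
//     if all four bytes were whitespace: ic = native_lspace(buf, il, ic)
//
// The CMPQ against ' ' is a cheap pre-filter: bytes above 0x20 can never be
// whitespace, so only the remaining ones hit the BTQ bit test.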
func (self *_Assembler) lspace(subfix string) {
    var label = "_lspace" + subfix

    self.Emit("CMPQ" , _IC, _IL)    // CMPQ IC, IL
    self.Sjmp("JAE" , _LB_eof_error)    // JAE _eof_error
    self.Emit("MOVQ" , jit.Imm(_BM_space), _DX)    // MOVQ _BM_space, DX
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)    // MOVBQZX (IP)(IC), AX
    self.Emit("CMPQ" , _AX, jit.Imm(' '))    // CMPQ AX, $' '
    self.Sjmp("JA" , label)    // JA _lspace_{n}
    self.Emit("BTQ" , _AX, _DX)    // BTQ AX, DX
    self.Sjmp("JNC" , label)    // JNC _lspace_{n}

    /* test up to 4 characters */
    for i := 0; i < 3; i++ {
        self.Emit("ADDQ" , jit.Imm(1), _IC)    // ADDQ $1, IC
        self.Emit("CMPQ" , _IC, _IL)    // CMPQ IC, IL
        self.Sjmp("JAE" , _LB_eof_error)    // JAE _eof_error
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)    // MOVBQZX (IP)(IC), AX
        self.Emit("CMPQ" , _AX, jit.Imm(' '))    // CMPQ AX, $' '
        self.Sjmp("JA" , label)    // JA _lspace_{n}
        self.Emit("BTQ" , _AX, _DX)    // BTQ AX, DX
        self.Sjmp("JNC" , label)    // JNC _lspace_{n}
    }

    /* hand over to the native function */
    self.Emit("MOVQ" , _IP, _DI)    // MOVQ IP, DI
    self.Emit("MOVQ" , _IL, _SI)    // MOVQ IL, SI
    self.Emit("MOVQ" , _IC, _DX)    // MOVQ IC, DX
    self.call(_F_lspace)    // CALL lspace
    self.Emit("TESTQ" , _AX, _AX)    // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v)    // JS _parsing_error_v
    self.Emit("CMPQ" , _AX, _IL)    // CMPQ AX, IL
    self.Sjmp("JAE" , _LB_eof_error)    // JAE _eof_error
    self.Emit("MOVQ" , _AX, _IC)    // MOVQ AX, IC
    self.Link(label)    // _lspace_{n}:
}

func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
    self.match_char(p.vb())
}

func (self *_Assembler) match_char(char byte) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))    // CMPB (IP)(IC), ${p.vb()}
    self.Sjmp("JNE" , _LB_char_0_error)    // JNE _char_0_error
    self.Emit("ADDQ", jit.Imm(1), _IC)    // ADDQ $1, IC
}

func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
    self.check_eof(1)
    self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX)    // LEAQ 1(IC), AX
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))    // CMPB (IP)(IC), ${p.vb()}
    self.Emit("CMOVQEQ", _AX, _IC)    // CMOVQEQ AX, IC
    self.Xjmp("JE" , p.vi())    // JE {p.vi()}
}

func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))    // CMPB (IP)(IC), ${p.vb()}
    self.Xjmp("JE" , p.vi())    // JE {p.vi()}
}

func (self *_Assembler) _asm_OP_add(p *_Instr) {
    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)    // ADDQ ${p.vi()}, IC
}

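// _asm_OP_load / _asm_OP_save / _asm_OP_drop manage the decoder's value-pointer
// stack inside the _Stack passed as sb: word 0 holds the number of bytes of saved
// pointers, and the pointers themselves start at offset 8. save pushes vp (bounds-
// checked against _MaxStackBytes), load re-reads the most recently saved pointer,
// and drop/drop_2 pop one or two slots and zero them so stale pointers do not keep
// dead objects alive.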
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)    // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)    // MOVQ (ST)(AX), VP
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)    // MOVQ (ST), CX
    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))    // CMPQ CX, ${_MaxStackBytes}
    self.Sjmp("JAE" , _LB_stack_error)    // JAE _stack_error
    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false)    // MOVQ VP, 8(ST)(CX)
    self.Emit("ADDQ", jit.Imm(8), _CX)    // ADDQ $8, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))    // MOVQ CX, (ST)
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)    // MOVQ (ST), AX
    self.Emit("SUBQ", jit.Imm(8), _AX)    // SUBQ $8, AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)    // MOVQ 8(ST)(AX), VP
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))    // MOVQ AX, (ST)
    self.Emit("XORL", _ET, _ET)    // XORL ET, ET
    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8))    // MOVQ ET, 8(ST)(AX)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)    // MOVQ (ST), AX
    self.Emit("SUBQ" , jit.Imm(16), _AX)    // SUBQ $16, AX
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)    // MOVQ 8(ST)(AX), VP
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))    // MOVQ AX, (ST)
    self.Emit("PXOR" , _X0, _X0)    // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // MOVOU X0, 8(ST)(AX)
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX)    // MOVQ ${p.vt()}, AX
    self.decode_dynamic(_AX, _VP)    // DECODE AX, VP
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

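// _asm_OP_switch dispatches on the field index left in sr by OP_struct_field via an
// inline, PC-relative jump table: the raw bytes 0x48 0x8d 0x3d encode
// LEAQ rel32(PC), DI, the Sref/Xref pair patches in the table address and per-case
// offsets, and out-of-range indices fall through to the default case. Roughly:
//
//     if sr < len(cases) {
//         goto cases[sr]    // via a table of 32-bit offsets relative to its base
//     }
//     // otherwise: default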
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX)    // MOVQ sr, AX
    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))    // CMPQ AX, ${len(p.vs())}
    self.Sjmp("JAE" , "_default_{n}")    // JAE _default_{n}

    /* jump table selector */
    self.Byte(0x48, 0x8d, 0x3d)    // LEAQ ?(PC), DI
    self.Sref("_switch_table_{n}", 4)    // .... &_switch_table_{n}
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)    // MOVLQSX (DI)(AX*4), AX
    self.Emit("ADDQ" , _DI, _AX)    // ADDQ DI, AX
    self.Rjmp("JMP" , _AX)    // JMP AX
    self.Link("_switch_table_{n}")    // _switch_table_{n}:

    /* generate the jump table */
    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }

    /* default case */
    self.Link("_default_{n}")
    self.NOP()
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))    // MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))    // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))    // MOVQ $(i), (SP)
    self.call_go(_F_println)
}
