podman

Форк
0
/
assembler_stkabi_amd64.go 
1950 строк · 89.1 Кб
1
// +build go1.16,!go1.17
2

3
/*
4
 * Copyright 2021 ByteDance Inc.
5
 *
6
 * Licensed under the Apache License, Version 2.0 (the "License");
7
 * you may not use this file except in compliance with the License.
8
 * You may obtain a copy of the License at
9
 *
10
 *     http://www.apache.org/licenses/LICENSE-2.0
11
 *
12
 * Unless required by applicable law or agreed to in writing, software
13
 * distributed under the License is distributed on an "AS IS" BASIS,
14
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
 * See the License for the specific language governing permissions and
16
 * limitations under the License.
17
 */
18

19
package decoder
20

21
import (
22
    `encoding/json`
23
    `fmt`
24
    `math`
25
    `reflect`
26
    `unsafe`
27
    
28
    `github.com/bytedance/sonic/internal/caching`
29
    `github.com/bytedance/sonic/internal/jit`
30
    `github.com/bytedance/sonic/internal/native`
31
    `github.com/bytedance/sonic/internal/native/types`
32
    `github.com/bytedance/sonic/internal/rt`
33
    `github.com/twitchyliquid64/golang-asm/obj`
34
)
35

36
/** Register Allocations
37
 *
38
 *  State Registers:
39
 *
40
 *      %rbx : stack base
41
 *      %r12 : input pointer
42
 *      %r13 : input length
43
 *      %r14 : input cursor
44
 *      %r15 : value pointer
45
 *
46
 *  Error Registers:
47
 *
48
 *      %r10 : error type register
49
 *      %r11 : error pointer register
50
 */
51

52
/** Function Prototype & Stack Map
53
 *
54
 *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
55
 *
56
 *  s.buf  :   (FP)
57
 *  s.len  :  8(FP)
58
 *  ic     : 16(FP)
59
 *  vp     : 24(FP)
60
 *  sb     : 32(FP)
61
 *  fv     : 40(FP)
62
 *  sv     : 56(FP)
63
 *  err.vt : 72(FP)
64
 *  err.vp : 80(FP)
65
 */
66

67
const (
    _FP_args   = 96     // 96 bytes to pass arguments and return values for this function
    _FP_fargs  = 80     // 80 bytes for passing arguments to other Go functions
    _FP_saves  = 40     // 40 bytes for saving the registers before CALL instructions
    _FP_locals = 144    // 144 bytes for local variables
)

// Derived frame-layout offsets: locals sit above the out-going argument
// area, and the caller's BP / return address sit at the top of the frame.
const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8     // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8     // 8 bytes for the return address
)

// Little-endian 4-byte immediates used to match JSON literals in one compare.
const (
    _IM_null = 0x6c6c756e   // 'null'
    _IM_true = 0x65757274   // 'true'
    _IM_alse = 0x65736c61   // 'alse' ('false' without the 'f')
)

// Bitmask with one bit set per ASCII whitespace character (bit n = byte n).
const (
    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)

const (
    _MODE_JSON = 1 << 3 // base64 mode
)
93

94
// Branch-target label names used by the assembler's error handlers and
// shared subroutines (resolved by Link/Sjmp).
const (
    _LB_error           = "_error"
    _LB_im_error        = "_im_error"
    _LB_eof_error       = "_eof_error"
    _LB_type_error      = "_type_error"
    _LB_field_error     = "_field_error"
    _LB_range_error     = "_range_error"
    _LB_stack_error     = "_stack_error"
    _LB_base64_error    = "_base64_error"
    _LB_unquote_error   = "_unquote_error"
    _LB_parsing_error   = "_parsing_error"
    _LB_parsing_error_v = "_parsing_error_v"
    _LB_mismatch_error   = "_mismatch_error"
)

// Labels that report an invalid character at a fixed offset from the
// current cursor (+0..+4, -2, -3).
const (
    _LB_char_0_error  = "_char_0_error"
    _LB_char_1_error  = "_char_1_error"
    _LB_char_2_error  = "_char_2_error"
    _LB_char_3_error  = "_char_3_error"
    _LB_char_4_error  = "_char_4_error"
    _LB_char_m2_error = "_char_m2_error"
    _LB_char_m3_error = "_char_m3_error"
)

// Labels of the shared value-skipping subroutines.
const (
    _LB_skip_one = "_skip_one"
    _LB_skip_key_value = "_skip_key_value"
)
123

124
// Scratch registers used freely between instructions.
var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
    _X0 = jit.Reg("X0")
    _X1 = jit.Reg("X1")
)

// State registers, live across the whole decode loop
// (see the "Register Allocations" comment above).
var (
    _ST = jit.Reg("BX")     // stack base
    _IP = jit.Reg("R12")    // input pointer
    _IL = jit.Reg("R13")    // input length
    _IC = jit.Reg("R14")    // input cursor
    _VP = jit.Reg("R15")    // value pointer
)

// R10/R11 are multi-purpose: R10 doubles as the write-barrier register,
// a flag register in the generic decoder, and the error-type register.
var (
    _R10 = jit.Reg("R10")    // used for gcWriteBarrier
    _DF  = jit.Reg("R10")    // reuse R10 in generic decoder for flags
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)
152

153
// Incoming argument slots, addressed relative to SP above this frame
// (offsets follow the "Function Prototype & Stack Map" comment above).
var (
    _ARG_s  = _ARG_sp   // alias: the string header starts at its data pointer
    _ARG_sp = jit.Ptr(_SP, _FP_base)
    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
)

// The `sv` string argument and the map-key scratch slot.
var (
    _VAR_sv = _VAR_sv_p     // alias: the string header starts at its data pointer
    _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48)
    _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56)
    _VAR_vk   = jit.Ptr(_SP, _FP_base + 64)
)

// Return-value slots (rc int, err iface).
var (
    _RET_rc = jit.Ptr(_SP, _FP_base + 72)
    _RET_et = jit.Ptr(_SP, _FP_base + 80)
    _RET_ep = jit.Ptr(_SP, _FP_base + 88)
)

var (
    _VAR_st = _VAR_st_Vt    // alias: the StateMachine struct starts at its Vt field
    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
)


// Fields of the native parser's state struct, spilled in the locals area.
var (
    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
)

// Register spill slots for the string-scanning helpers.
var (
    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
)

// Saved pointer/length/link-register for the copy_string subroutine.
var (
    _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
    _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
)

var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)

var (
    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save mismatched type
    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save mismatched position
    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save skip return pc
)
212

213
// _Assembler translates a decoder _Program into executable machine code.
type _Assembler struct {
    jit.BaseAssembler
    p _Program      // instruction sequence to compile
    name string     // type name, used to label the generated function
}
218

219
// newAssembler returns an assembler initialized with the given program.
func newAssembler(p _Program) *_Assembler {
    asm := new(_Assembler)
    return asm.Init(p)
}
222

223
/** Assembler Interface **/
224

225
// Load assembles the program and returns the generated code as a _Decoder,
// registering the stack/pointer maps so the GC can scan the frame.
func (self *_Assembler) Load() _Decoder {
    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
228

229
// Init stores the program and registers compile as the code-generation
// callback; it returns the receiver for chaining.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}
234

235
// compile emits the whole function: prologue, the program body, the
// epilogue, then every shared subroutine and error handler. The order
// matters only in that the body must precede the out-of-line handlers
// it jumps to.
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.copy_string()
    self.escape_string()
    self.escape_string_twice()
    self.skip_one()
    self.skip_key_value()
    self.mismatch_error()
    self.type_error()
    self.field_error()
    self.range_error()
    self.stack_error()
    self.base64_error()
    self.parsing_error()
}
252

253
/** Assembler Stages **/
254

255
// _OpFuncTab maps each opcode to its code-generation method; unassigned
// opcodes stay nil and are rejected by instr.
var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_any              : (*_Assembler)._asm_OP_any,
    _OP_dyn              : (*_Assembler)._asm_OP_dyn,
    _OP_str              : (*_Assembler)._asm_OP_str,
    _OP_bin              : (*_Assembler)._asm_OP_bin,
    _OP_bool             : (*_Assembler)._asm_OP_bool,
    _OP_num              : (*_Assembler)._asm_OP_num,
    _OP_i8               : (*_Assembler)._asm_OP_i8,
    _OP_i16              : (*_Assembler)._asm_OP_i16,
    _OP_i32              : (*_Assembler)._asm_OP_i32,
    _OP_i64              : (*_Assembler)._asm_OP_i64,
    _OP_u8               : (*_Assembler)._asm_OP_u8,
    _OP_u16              : (*_Assembler)._asm_OP_u16,
    _OP_u32              : (*_Assembler)._asm_OP_u32,
    _OP_u64              : (*_Assembler)._asm_OP_u64,
    _OP_f32              : (*_Assembler)._asm_OP_f32,
    _OP_f64              : (*_Assembler)._asm_OP_f64,
    _OP_unquote          : (*_Assembler)._asm_OP_unquote,
    _OP_nil_1            : (*_Assembler)._asm_OP_nil_1,
    _OP_nil_2            : (*_Assembler)._asm_OP_nil_2,
    _OP_nil_3            : (*_Assembler)._asm_OP_nil_3,
    _OP_deref            : (*_Assembler)._asm_OP_deref,
    _OP_index            : (*_Assembler)._asm_OP_index,
    _OP_is_null          : (*_Assembler)._asm_OP_is_null,
    _OP_is_null_quote    : (*_Assembler)._asm_OP_is_null_quote,
    _OP_map_init         : (*_Assembler)._asm_OP_map_init,
    _OP_map_key_i8       : (*_Assembler)._asm_OP_map_key_i8,
    _OP_map_key_i16      : (*_Assembler)._asm_OP_map_key_i16,
    _OP_map_key_i32      : (*_Assembler)._asm_OP_map_key_i32,
    _OP_map_key_i64      : (*_Assembler)._asm_OP_map_key_i64,
    _OP_map_key_u8       : (*_Assembler)._asm_OP_map_key_u8,
    _OP_map_key_u16      : (*_Assembler)._asm_OP_map_key_u16,
    _OP_map_key_u32      : (*_Assembler)._asm_OP_map_key_u32,
    _OP_map_key_u64      : (*_Assembler)._asm_OP_map_key_u64,
    _OP_map_key_f32      : (*_Assembler)._asm_OP_map_key_f32,
    _OP_map_key_f64      : (*_Assembler)._asm_OP_map_key_f64,
    _OP_map_key_str      : (*_Assembler)._asm_OP_map_key_str,
    _OP_map_key_utext    : (*_Assembler)._asm_OP_map_key_utext,
    _OP_map_key_utext_p  : (*_Assembler)._asm_OP_map_key_utext_p,
    _OP_array_skip       : (*_Assembler)._asm_OP_array_skip,
    _OP_array_clear      : (*_Assembler)._asm_OP_array_clear,
    _OP_array_clear_p    : (*_Assembler)._asm_OP_array_clear_p,
    _OP_slice_init       : (*_Assembler)._asm_OP_slice_init,
    _OP_slice_append     : (*_Assembler)._asm_OP_slice_append,
    _OP_object_skip      : (*_Assembler)._asm_OP_object_skip,
    _OP_object_next      : (*_Assembler)._asm_OP_object_next,
    _OP_struct_field     : (*_Assembler)._asm_OP_struct_field,
    _OP_unmarshal        : (*_Assembler)._asm_OP_unmarshal,
    _OP_unmarshal_p      : (*_Assembler)._asm_OP_unmarshal_p,
    _OP_unmarshal_text   : (*_Assembler)._asm_OP_unmarshal_text,
    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
    _OP_lspace           : (*_Assembler)._asm_OP_lspace,
    _OP_match_char       : (*_Assembler)._asm_OP_match_char,
    _OP_check_char       : (*_Assembler)._asm_OP_check_char,
    _OP_load             : (*_Assembler)._asm_OP_load,
    _OP_save             : (*_Assembler)._asm_OP_save,
    _OP_drop             : (*_Assembler)._asm_OP_drop,
    _OP_drop_2           : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse          : (*_Assembler)._asm_OP_recurse,
    _OP_goto             : (*_Assembler)._asm_OP_goto,
    _OP_switch           : (*_Assembler)._asm_OP_switch,
    _OP_check_char_0     : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err     : (*_Assembler)._asm_OP_dismatch_err,
    _OP_go_skip          : (*_Assembler)._asm_OP_go_skip,
    _OP_add              : (*_Assembler)._asm_OP_add,
    _OP_check_empty      : (*_Assembler)._asm_OP_check_empty,
}
322

323
// instr emits the code for a single instruction by dispatching through
// _OpFuncTab; an opcode with no handler is a programming error and panics.
func (self *_Assembler) instr(v *_Instr) {
    fn := _OpFuncTab[v.op()]
    if fn == nil {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
    fn(self, v)
}
330

331
// instrs emits every instruction of the program in order, marking each
// program counter so branches can resolve to it.
func (self *_Assembler) instrs() {
    for i := range self.p {
        ins := self.p[i]
        self.Mark(i)
        self.instr(&ins)
        self.debug_instr(i, &ins)
    }
}
338

339
// epilogue emits the function exit: on normal completion it checks whether
// a deferred type-mismatch was recorded in _VAR_et and, if so, diverts to
// the mismatch handler; otherwise it stores the cursor and error pair into
// the return slots, restores BP and pops the frame.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _EP, _EP)                     // XORL EP, EP
    self.Emit("MOVQ", _VAR_et, _ET)                 // MOVQ VAR_et, ET
    self.Emit("TESTQ", _ET, _ET)                    // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_mismatch_error)            // JNZ _LB_mismatch_error
    self.Link(_LB_error)                            // _error:
    self.Emit("MOVQ", _IC, _RET_rc)                 // MOVQ IC, rc<>+40(FP)
    self.Emit("MOVQ", _ET, _RET_et)                 // MOVQ ET, et<>+48(FP)
    self.Emit("MOVQ", _EP, _RET_ep)                 // MOVQ EP, ep<>+56(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
    self.Emit("RET")                                // RET
}
353

354
// prologue emits the function entry: it allocates the frame, saves the
// caller's BP, loads the arguments into the state registers, points the
// digit scratch buffer at the space reserved inside the stack object,
// and clears the mismatch-type marker.
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
    self.Emit("MOVQ", _ARG_sp, _IP)                 // MOVQ s.p<>+0(FP), IP
    self.Emit("MOVQ", _ARG_sl, _IL)                 // MOVQ s.l<>+8(FP), IL
    self.Emit("MOVQ", _ARG_ic, _IC)                 // MOVQ ic<>+16(FP), IC
    self.Emit("MOVQ", _ARG_vp, _VP)                 // MOVQ vp<>+24(FP), VP
    self.Emit("MOVQ", _ARG_sb, _ST)                 // MOVQ sb<>+32(FP), ST
    // initialize digital buffer first
    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)    // MOVQ $_MaxDigitNums, ss.Dcap
    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)           // LEAQ _DbufOffset(ST), AX
    self.Emit("MOVQ", _AX, _VAR_st_Db)                          // MOVQ AX, ss.Dbuf
    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_et)                          // MOVQ AX, VAR_et (no mismatch recorded yet)
}
370

371
/** Function Calling Helpers **/
372

373
// _REG_go lists the state registers that must survive a call into Go code
// (saved/restored around call_go).
var _REG_go = []obj.Addr {
    _ST,
    _VP,
    _IP,
    _IL,
    _IC,
}
380

381
// save spills the given registers into the reserved save area
// (_FP_saves bytes, 8 per register), panicking if it would overflow.
func (self *_Assembler) save(r ...obj.Addr) {
    for idx, reg := range r {
        if idx >= _FP_saves/8 {
            panic("too many registers to save")
        }
        self.Emit("MOVQ", reg, jit.Ptr(_SP, _FP_fargs+int64(idx)*8))
    }
}
390

391
// load restores the given registers from the save area, in the same slot
// order used by save, panicking if it would read past the area.
func (self *_Assembler) load(r ...obj.Addr) {
    for idx, reg := range r {
        if idx >= _FP_saves/8 {
            panic("too many registers to load")
        }
        self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(idx)*8), reg)
    }
}
400

401
// call emits an indirect call to fn through AX (AX is clobbered).
func (self *_Assembler) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _AX)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _AX)      // CALL AX
}
405

406
// call_go calls a Go function, spilling and reloading the decoder state
// registers around the call since Go code may clobber them.
func (self *_Assembler) call_go(fn obj.Addr) {
    self.save(_REG_go...)   // SAVE $REG_go
    self.call(fn)           // CALL ${fn}
    self.load(_REG_go...)   // LOAD $REG_go
}
411

412
// call_sf calls a native skip-family routine with the C-style register
// convention: DI = &s, SI = &ic, DX = &fsm, CX = fv. The cursor is spilled
// to its argument slot before the call and reloaded after, since the
// routine advances it through the pointer in SI.
func (self *_Assembler) call_sf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)                      // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic)                     // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI)                     // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)    // LEAQ _FsmOffset(ST), DX
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call(fn)                                       // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC)                     // MOVQ ic<>+16(FP), IC
}
421

422
// call_vf calls a native value-parsing routine: DI = &s, SI = &ic,
// DX = &st (the parser state spilled in the locals area). The cursor is
// round-tripped through memory exactly as in call_sf.
func (self *_Assembler) call_vf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)      // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic)     // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI)     // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", _VAR_st, _DX)     // LEAQ st, DX
    self.call(fn)                       // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC)     // MOVQ ic<>+16(FP), IC
}
430

431
/** Assembler Error Handlers **/
432

433
// Go helper functions called from generated code to build error values.
var (
    _F_convT64        = jit.Func(convT64)
    _F_error_wrap     = jit.Func(error_wrap)
    _F_error_type     = jit.Func(error_type)
    _F_error_field    = jit.Func(error_field)
    _F_error_value    = jit.Func(error_value)
    _F_error_mismatch = jit.Func(error_mismatch)
)

// Pre-resolved rtype pointers for the primitive types referenced by
// range/overflow checks.
var (
    _I_int8    , _T_int8    = rtype(reflect.TypeOf(int8(0)))
    _I_int16   , _T_int16   = rtype(reflect.TypeOf(int16(0)))
    _I_int32   , _T_int32   = rtype(reflect.TypeOf(int32(0)))
    _I_uint8   , _T_uint8   = rtype(reflect.TypeOf(uint8(0)))
    _I_uint16  , _T_uint16  = rtype(reflect.TypeOf(uint16(0)))
    _I_uint32  , _T_uint32  = rtype(reflect.TypeOf(uint32(0)))
    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

// Pre-computed itab for base64.CorruptInputError as an `error`.
var (
    _T_error                    = rt.UnpackType(errorType)
    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

// Stack-overflow sentinel and the itabs used by the error paths below.
var (
    _V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
    _I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)
462

463
// type_error emits the handler that converts the rtype in ET into a
// Go error via error_type, then exits through _error.
func (self *_Assembler) type_error() {
    self.Link(_LB_type_error)                   // _type_error:
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0))     // MOVQ    ET, (SP)
    self.call_go(_F_error_type)                 // CALL_GO error_type
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET)     // MOVQ    8(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP)    // MOVQ    16(SP), EP
    self.Sjmp("JMP" , _LB_error)                // JMP     _error
}
471

472

473
// mismatch_error emits the handler for a recorded type mismatch: if the
// saved value is already a MismatchTypeError itab it is returned as-is;
// otherwise error_mismatch builds one from the input, the saved position
// and the saved rtype.
func (self *_Assembler) mismatch_error() {
    self.Link(_LB_mismatch_error)                     // _mismatch_error:
    self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
    self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ", _ET, _AX)                       // CMPQ ET, AX
    self.Sjmp("JE"  , _LB_error)                      // JE _LB_error
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ    AX, (SP)
    self.Emit("MOVQ", _ARG_sl, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))     // MOVQ    CX, 8(SP)
    self.Emit("MOVQ", _VAR_ic, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
    self.Emit("MOVQ", _VAR_et, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))    // MOVQ    CX, 24(SP)
    self.call_go(_F_error_mismatch)             // CALL_GO error_mismatch
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)    // MOVQ    32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)    // MOVQ    40(SP), EP
    self.Sjmp("JMP" , _LB_error)                // JMP     _error
}
493

494
// _asm_OP_dismatch_err records a type mismatch without aborting: it saves
// the current cursor and the expected rtype so the epilogue can report it
// after decoding finishes.
func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", jit.Type(p.vt()), _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
}
499

500
// _asm_OP_go_skip jumps to the shared skip_one subroutine, first stashing
// the continuation address (the branch target p.vi()) in _VAR_pc so
// skip_one can jump back when done.
func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
    self.Xref(p.vi(), 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP"  , _LB_skip_one)            // JMP     _skip_one
}
506

507
// skip_one emits the shared subroutine that skips a single JSON value
// starting at the saved cursor, then jumps back through the saved
// return address in _VAR_pc. A negative return from the native skipper
// diverts to the parsing-error path.
func (self *_Assembler) skip_one() {
    self.Link(_LB_skip_one)                     // _skip_one:
    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
}
516

517

518
// skip_key_value emits the shared subroutine that skips an entire
// `"key" : value` pair (key, colon with surrounding whitespace, value),
// then jumps back through the saved return address in _VAR_pc.
func (self *_Assembler) skip_key_value() {
    self.Link(_LB_skip_key_value)               // _skip_key_value:
    // skip the key
    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
    // match char ':'
    self.lspace("_global_1")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
    self.Sjmp("JNE"  , _LB_parsing_error_v)     // JNE     _parse_error_v
    self.Emit("ADDQ", jit.Imm(1), _IC)          // ADDQ    $1, IC
    self.lspace("_global_2")
    // skip the value
    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
    // jump back to specified address
    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
}
539

540
// field_error emits the handler that reports an invalid struct field
// name: the field string saved in sv is passed to error_field.
func (self *_Assembler) field_error() {
    self.Link(_LB_field_error)                  // _field_error:
    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))    // MOVOU   X0, (SP)
    self.call_go(_F_error_field)                // CALL_GO error_field
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET)   // MOVQ    16(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP)   // MOVQ    24(SP), EP
    self.Sjmp("JMP"  , _LB_error)               // JMP     _error
}
549

550
// range_error emits the handler for numeric overflow: it re-slices the
// offending literal from the parser's end position and passes it, with
// the current error pair, to error_value.
func (self *_Assembler) range_error() {
    self.Link(_LB_range_error)                  // _range_error:
    self.slice_from(_VAR_st_Ep, 0)              // SLICE   st.Ep, $0
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0))     // MOVQ    DI, (SP)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8))     // MOVQ    SI, 8(SP)
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16))    // MOVQ    ET, 16(SP)
    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24))    // MOVQ    EP, 24(SP)
    self.call_go(_F_error_value)                // CALL_GO error_value
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)    // MOVQ    32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)    // MOVQ    40(SP), EP
    self.Sjmp("JMP" , _LB_error)                // JMP     _error
}
562

563
// stack_error emits the handler for decoder stack exhaustion, returning
// the pre-built stackOverflow error without calling into Go.
func (self *_Assembler) stack_error() {
    self.Link(_LB_stack_error)                              // _stack_error:
    self.Emit("MOVQ", _V_stackOverflow, _EP)                // MOVQ ${_V_stackOverflow}, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ ${_I_json_UnsupportedValueError}, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
}
569

570
// base64_error emits the handler for a base64 decode failure: AX holds
// -(pos+1), which is converted back to the byte offset and boxed through
// convT64 into a base64.CorruptInputError value.
func (self *_Assembler) base64_error() {
    self.Link(_LB_base64_error)
    self.Emit("NEGQ", _AX)                                  // NEGQ    AX
    self.Emit("SUBQ", jit.Imm(1), _AX)                      // SUBQ    $1, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                 // MOVQ    AX, (SP)
    self.call_go(_F_convT64)                                // CALL_GO convT64
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP)                 // MOVQ    8(SP), EP
    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)     // MOVQ    ${itab(base64.CorruptInputError)}, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP     _error
}
580

581
// parsing_error emits the cluster of parse-failure entry points that all
// funnel into one error_wrap call: EOF, unquote failures, negative native
// return codes, and the family of invalid-character labels that adjust the
// cursor by a fixed offset before reporting.
func (self *_Assembler) parsing_error() {
    self.Link(_LB_eof_error)                                            // _eof_error:
    self.Emit("MOVQ" , _IL, _IC)                                        // MOVQ    IL, IC
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)              // MOVL    ${types.ERR_EOF}, EP
    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
    self.Link(_LB_unquote_error)                                        // _unquote_error:
    self.Emit("SUBQ" , _VAR_sr, _SI)                                    // SUBQ    sr, SI
    self.Emit("SUBQ" , _SI, _IC)                                        // SUBQ    SI, IC
    self.Link(_LB_parsing_error_v)                                      // _parsing_error_v:
    self.Emit("MOVQ" , _AX, _EP)                                        // MOVQ    AX, EP
    self.Emit("NEGQ" , _EP)                                             // NEGQ    EP (native code returns -errno)
    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
    self.Link(_LB_char_m3_error)                                        // _char_m3_error:
    self.Emit("SUBQ" , jit.Imm(1), _IC)                                 // SUBQ    $1, IC
    self.Link(_LB_char_m2_error)                                        // _char_m2_error: (falls through: m3 = -1 + -2)
    self.Emit("SUBQ" , jit.Imm(2), _IC)                                 // SUBQ    $2, IC
    self.Sjmp("JMP"  , _LB_char_0_error)                                // JMP     _char_0_error
    self.Link(_LB_im_error)                                             // _im_error: find the first mismatching byte of a literal in CX
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))                    // CMPB    CX, (IP)(IC)
    self.Sjmp("JNE"  , _LB_char_0_error)                                // JNE     _char_0_error
    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))                    // CMPB    CX, 1(IP)(IC)
    self.Sjmp("JNE"  , _LB_char_1_error)                                // JNE     _char_1_error
    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))                    // CMPB    CX, 2(IP)(IC)
    self.Sjmp("JNE"  , _LB_char_2_error)                                // JNE     _char_2_error
    self.Sjmp("JMP"  , _LB_char_3_error)                                // JMP     _char_3_error
    self.Link(_LB_char_4_error)                                         // _char_4_error: (each label below adds 1 and falls through)
    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
    self.Link(_LB_char_3_error)                                         // _char_3_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
    self.Link(_LB_char_2_error)                                         // _char_2_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
    self.Link(_LB_char_1_error)                                         // _char_1_error:
    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
    self.Link(_LB_char_0_error)                                         // _char_0_error:
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)     // MOVL    ${types.ERR_INVALID_CHAR}, EP
    self.Link(_LB_parsing_error)                                        // _parsing_error:
    self.Emit("MOVOU", _ARG_s, _X0)                                     // MOVOU   s, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))                            // MOVOU   X0, (SP)
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))                           // MOVQ    IC, 16(SP)
    self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24))                           // MOVQ    EP, 24(SP)
    self.call_go(_F_error_wrap)                                         // CALL_GO error_wrap
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)                           // MOVQ    32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)                           // MOVQ    40(SP), EP
    self.Sjmp("JMP"  , _LB_error)                                       // JMP     _error
}
628

629
/** Memory Management Routines **/
630

631
// Runtime allocation helpers used by malloc/valloc below.
var (
    _T_byte     = jit.Type(byteType)
    _F_mallocgc = jit.Func(mallocgc)
)
635

636
// malloc emits a call to runtime.mallocgc for nb bytes of byte-typed,
// pointer-free memory (needzero = false), leaving the pointer in ret.
func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
    self.Emit("XORL", _AX, _AX)                 // XORL    AX, AX
    self.Emit("MOVQ", _T_byte, _CX)             // MOVQ    ${type(byte)}, CX
    self.Emit("MOVQ", nb, jit.Ptr(_SP, 0))      // MOVQ    ${nb}, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))     // MOVQ    CX, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
    self.call_go(_F_mallocgc)                   // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)    // MOVQ    24(SP), ${ret}
}
645

646
// valloc emits a call to runtime.mallocgc that allocates one zeroed value
// of type vt (needzero = true), leaving the pointer in ret.
func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)   // MOVQ    ${vt.Size()}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))             // MOVQ    AX, (SP)
    self.Emit("MOVQ", jit.Type(vt), _AX)                // MOVQ    ${vt}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))             // MOVQ    AX, 8(SP)
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16))     // MOVB    $1, 16(SP)
    self.call_go(_F_mallocgc)                           // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)            // MOVQ    24(SP), ${ret}
}
655

656
// vfollow emits pointer-following: dereference VP; if the pointee is nil,
// allocate a fresh vt and store it back (with write barrier), then make
// VP point at the (possibly new) value.
func (self *_Assembler) vfollow(vt reflect.Type) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ   (VP), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ  AX, AX
    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ    _end_{n}
    self.valloc(vt, _AX)                        // VALLOC ${vt}, AX
    self.WritePtrAX(1, jit.Ptr(_VP, 0), false)    // MOVQ   AX, (VP)
    self.Link("_end_{n}")                       // _end_{n}:
    self.Emit("MOVQ" , _AX, _VP)                // MOVQ   AX, VP
}
665

666
/** Value Parsing Routines **/
667

668
// Entry points of the native (SIMD) value parsers.
var (
    _F_vstring   = jit.Imm(int64(native.S_vstring))
    _F_vnumber   = jit.Imm(int64(native.S_vnumber))
    _F_vsigned   = jit.Imm(int64(native.S_vsigned))
    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)
674

675
// check_err emits the post-parse error check: a negative st.Vt means the
// native parser failed. With vt == nil the failure goes straight to the
// parsing-error path. With vt non-nil the error is downgraded to a type
// mismatch: the expected type and the pre-parse cursor (_BP) are recorded,
// then control diverts to skip_key_value (pin2 >= 0, for map keys — the
// value after the ':' must be skipped too) or skip_one (pin names the
// local resume label), resuming decoding afterwards.
func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ" , _VAR_st_Vt, _AX)         // MOVQ st.Vt, AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    // try to skip the value
    if vt != nil {
        self.Sjmp("JNS" , "_check_err_{n}")        // JNS  _check_err_{n}
        self.Emit("MOVQ", jit.Type(vt), _ET)
        self.Emit("MOVQ", _ET, _VAR_et)
        if pin2 != -1 {
            self.Emit("SUBQ", jit.Imm(1), _BP)    // back up over the opening quote of the key
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
            self.Xref(pin2, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_key_value)
        } else {
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
            self.Sref(pin, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_one)
        }
        self.Link("_check_err_{n}")
    } else {
        self.Sjmp("JS"   , _LB_parsing_error_v)     // JS  _parsing_error_v
    }
}
702

703
// check_eof emits a bounds check guaranteeing at least d more input bytes
// remain, branching to the EOF error handler otherwise. The d == 1 case is
// special-cased to a direct cursor/length compare, saving the LEAQ.
func (self *_Assembler) check_eof(d int64) {
    if d != 1 {
        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)     // LEAQ ${d}(IC), AX
        self.Emit("CMPQ", _AX, _IL)                 // CMPQ AX, IL
        self.Sjmp("JA"  , _LB_eof_error)            // JA   _eof_error
        return
    }
    self.Emit("CMPQ", _IC, _IL)         // CMPQ IC, IL
    self.Sjmp("JAE" , _LB_eof_error)    // JAE  _eof_error
}
713

714
// parse_string parses a JSON string via the native vstring subroutine.
// The decoder flags (fv) are passed in CX; vstring reads its validate flag
// from them. Errors go to the generic parsing-error handler (vt == nil).
func (self *_Assembler) parse_string() {    // parse_string has a validate flag params in the last
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call_vf(_F_vstring)
    self.check_err(nil, "", -1)
}
719

720
// parse_number parses a JSON number via the native vnumber subroutine.
// BP records the cursor at the value start so a failed parse can be skipped
// and reported as a mismatch of type vt (see check_err).
func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vnumber)                               // call  vnumber
    self.check_err(vt, pin, pin2)
}
725

726
// parse_signed parses a signed integer via the native vsigned subroutine.
// BP records the cursor at the value start for the skip-on-error path.
func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vsigned)
    self.check_err(vt, pin, pin2)
}
731

732
// parse_unsigned parses an unsigned integer via the native vunsigned
// subroutine. BP records the cursor at the value start for the skip-on-error
// path.
func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vunsigned)
    self.check_err(vt, pin, pin2)
}
737

738
// Pointer: DI, Size: SI, Return: R9  
739
// copy_string is a shared subroutine that copies the byte range [DI, DI+SI)
// into a freshly allocated buffer, detaching the string from the input.
// Calling convention: DI = pointer, SI = length, R9 = return address; on
// exit DI points at the copy, SI is unchanged, and control jumps through R9.
func (self *_Assembler) copy_string() {
    self.Link("_copy_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)       // spill args: malloc / memmove may clobber registers
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _AX)                              
    self.Emit("MOVQ", _AX, _VAR_sv_p)                    
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // memmove(dst = new buffer, src = bs_p, n = bs_n)
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _VAR_sv_p, _DI)           // reload results and return through the saved link
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
757

758
// Pointer: DI, Size: SI, Return: R9
759
// escape_string is a shared subroutine that unquotes a string containing
// escape sequences into a freshly allocated buffer via the native unquote.
// Calling convention: DI = pointer, SI = length, R9 = return address; on
// exit DI points at the unquoted buffer, SI holds its length, and control
// jumps through R9. A negative unquote result branches to the unquote-error
// handler (with SI restored to the original length + 1 for error reporting).
func (self *_Assembler) escape_string() {
    self.Link("_escape_string")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)      // spill args: malloc / unquote may clobber registers
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX)                                    // MALLOC SI, DX
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)                                  
    self.Emit("LEAQ" , _VAR_sr, _CX)                            // LEAQ   sr, CX
    self.Emit("XORL" , _R8, _R8)                                // XORL   R8, R8
    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)        // BTQ    ${_F_disable_urc}, fv
    self.Emit("SETCC", _R8)                                     // SETCC  R8
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)   // SHLQ   ${types.B_UNICODE_REPLACE}, R8
    self.call(_F_unquote)                                       // CALL   unquote
    self.Emit("MOVQ" , _VAR_bs_n, _SI)                          // MOVQ   ${n}, SI
    self.Emit("ADDQ" , jit.Imm(1), _SI)                         // ADDQ   $1, SI
    self.Emit("TESTQ", _AX, _AX)                                // TESTQ  AX, AX
    self.Sjmp("JS"   , _LB_unquote_error)                       // JS     _unquote_error
    self.Emit("MOVQ" , _AX, _SI)                                // AX = unquoted length
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
783

784
// escape_string_twice is like escape_string but performs a double unquote
// (types.F_DOUBLE_UNQUOTE), used for string-quoted strings such as `"\"...\""`.
// Calling convention: DI = pointer, SI = length, R9 = return address; on exit
// DI points at the unquoted buffer and SI holds its length. On error SI is
// restored to the original length + 3 before branching to the error handler.
func (self *_Assembler) escape_string_twice() {
    self.Link("_escape_string_twice")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)      // spill args: malloc / unquote may clobber registers
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX)                                        // MALLOC SI, DX
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)        
    self.Emit("LEAQ" , _VAR_sr, _CX)                                // LEAQ   sr, CX
    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)        // MOVL   ${types.F_DOUBLE_UNQUOTE}, R8
    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)            // BTQ    ${_F_disable_urc}, fv
    self.Emit("XORL" , _AX, _AX)                                    // XORL   AX, AX
    self.Emit("SETCC", _AX)                                         // SETCC  AX
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)       // SHLQ   ${types.B_UNICODE_REPLACE}, AX
    self.Emit("ORQ"  , _AX, _R8)                                    // ORQ    AX, R8
    self.call(_F_unquote)                                           // CALL   unquote
    self.Emit("MOVQ" , _VAR_bs_n, _SI)                              // MOVQ   ${n}, SI
    self.Emit("ADDQ" , jit.Imm(3), _SI)                             // ADDQ   $3, SI
    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ  AX, AX
    self.Sjmp("JS"   , _LB_unquote_error)                           // JS     _unquote_error
    self.Emit("MOVQ" , _AX, _SI)                                    // AX = unquoted length
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
810

811
/** Range Checking Routines **/
812

813
// Addresses of the float32 range bounds, embedded as immediates so the JIT
// code can UCOMISS against them (see range_single).
var (
    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

// Backing storage for the bounds; filled in by init below.
var (
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

// init populates the float32 range-check bounds with ±math.MaxFloat32.
func init() {
    *_Vp_max_f32 = math.MaxFloat32
    *_Vp_min_f32 = -math.MaxFloat32
}
827

828
// range_single converts the parsed double in st.Dv to float32 and range-checks
// it against ±math.MaxFloat32, branching to the range-error handler (with the
// float32 itab/type preloaded into ET/EP for the error value) on overflow.
func (self *_Assembler) range_single() {
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS st.Dv, X0
    self.Emit("MOVQ"    , _V_max_f32, _AX)              // MOVQ     _max_f32, AX
    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ     ${itab(float32)}, ET
    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ     ${type(float32)}, EP
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS  (AX), X0
    self.Sjmp("JA"      , _LB_range_error)              // JA       _range_error
    self.Emit("MOVQ"    , _V_min_f32, _AX)              // MOVQ     _min_f32, AX
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS  (AX), X0
    self.Sjmp("JB"      , _LB_range_error)              // JB       _range_error
}
839

840
// range_signed loads the parsed integer from st.Iv into AX and range-checks it
// against [a, b], branching to the range-error handler (with itab i and type t
// preloaded into ET/EP for the error value) when out of range.
func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
    self.Emit("MOVQ", _VAR_st_Iv, _AX)      // MOVQ st.Iv, AX
    self.Emit("MOVQ", jit.Gitab(i), _ET)    // MOVQ ${i}, ET
    self.Emit("MOVQ", jit.Gtype(t), _EP)    // MOVQ ${t}, EP
    self.Emit("CMPQ", _AX, jit.Imm(a))      // CMPQ AX, ${a}
    self.Sjmp("JL"  , _LB_range_error)      // JL   _range_error
    self.Emit("CMPQ", _AX, jit.Imm(b))      // CMPQ AX, ${b}
    self.Sjmp("JG"  , _LB_range_error)      // JG   _range_error
}
849

850
// range_unsigned loads the parsed integer from st.Iv into AX and range-checks
// it against [0, v]: negative values (sign bit set) and values above v branch
// to the range-error handler with itab i and type t preloaded into ET/EP.
func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
    self.Emit("MOVQ" , _VAR_st_Iv, _AX)         // MOVQ  st.Iv, AX
    self.Emit("MOVQ" , jit.Gitab(i), _ET)       // MOVQ  ${i}, ET
    self.Emit("MOVQ" , jit.Gtype(t), _EP)       // MOVQ  ${t}, EP
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JS"   , _LB_range_error)         // JS    _range_error
    self.Emit("CMPQ" , _AX, jit.Imm(int64(v)))  // CMPQ  AX, ${v}
    self.Sjmp("JA"   , _LB_range_error)         // JA    _range_error
}
859

860
/** String Manipulating Routines **/
861

862
// Entry point of the native string-unquoting subroutine.
var (
    _F_unquote = jit.Imm(int64(native.S_unquote))
)
865

866
// slice_from loads the start offset from memory operand p into SI and
// delegates to slice_from_r, producing DI = start pointer and SI = length of
// the input span [p, IC+d).
func (self *_Assembler) slice_from(p obj.Addr, d int64) {
    self.Emit("MOVQ", p, _SI)   // MOVQ    ${p}, SI
    self.slice_from_r(_SI, d)   // SLICE_R SI, ${d}
}
870

871
// slice_from_r computes DI = IP + p (pointer into the input) and
// SI = IC - p + d (span length), clobbering register p via NEGQ.
func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)   // LEAQ (IP)(${p}), DI
    self.Emit("NEGQ", p)                            // NEGQ ${p}
    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)   // LEAQ d(IC)(${p}), SI
}
876

877
// unquote_once stores the string just parsed by vstring into destination
// operands p (pointer) and n (length). If st.Ep != -1 the string contains
// escapes and is routed through the _escape_string subroutine; otherwise,
// when copy is set and the copy-string flag is on, it is detached from the
// input via _copy_string. stack selects a plain MOVQ (stack slot destination)
// versus a write-barriered store for heap destinations.
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
    self.slice_from(_VAR_st_Iv, -1)                             // SLICE  st.Iv, $-1
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))                 // CMPQ   st.Ep, $-1
    self.Sjmp("JE"   , "_noescape_{n}")                         // JE     _noescape_{n}
    self.Byte(0x4c, 0x8d, 0x0d)                                 // LEAQ (PC), R9
    self.Sref("_unquote_once_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string")
    self.Link("_noescape_{n}")                                  // _noescape_{n}:
    if copy {
        self.Emit("BTQ"  , jit.Imm(_F_copy_string), _ARG_fv)    
        self.Sjmp("JNC", "_unquote_once_write_{n}")
        self.Byte(0x4c, 0x8d, 0x0d)                             // LEAQ (PC), R9
        self.Sref("_unquote_once_write_{n}", 4)
        self.Sjmp("JMP", "_copy_string")
    }
    self.Link("_unquote_once_write_{n}")
    self.Emit("MOVQ" , _SI, n)                                  // MOVQ   SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p) 
    } else {
        self.WriteRecNotAX(10, _DI, p, false, false)
    }
}
900

901
// unquote_twice stores a doubly-quoted string (e.g. `"\"...\""`) into
// destination operands p (pointer) and n (length). It first verifies the
// closing `\"` sequence (bytes at IC-3 / IC-2), then either takes the fast
// no-escape path, detaches via _copy_string when the copy-string flag is set,
// or routes through _escape_string_twice when inner escapes remain. stack
// selects a plain MOVQ versus a write-barriered store for heap destinations.
func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))                     // CMPQ   st.Ep, $-1
    self.Sjmp("JE"   , _LB_eof_error)                               // JE     _eof_error
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\'))     // CMPB   -3(IP)(IC), $'\\'
    self.Sjmp("JNE"  , _LB_char_m3_error)                           // JNE    _char_m3_error
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))      // CMPB   -2(IP)(IC), $'"'
    self.Sjmp("JNE"  , _LB_char_m2_error)                           // JNE    _char_m2_error
    self.slice_from(_VAR_st_Iv, -3)                                 // SLICE  st.Iv, $-3
    self.Emit("MOVQ" , _SI, _AX)                                    // MOVQ   SI, AX
    self.Emit("ADDQ" , _VAR_st_Iv, _AX)                             // ADDQ   st.Iv, AX
    self.Emit("CMPQ" , _VAR_st_Ep, _AX)                             // CMPQ   st.Ep, AX
    self.Sjmp("JE"   , "_noescape_{n}")                             // JE     _noescape_{n}: only escape is the closing \"
    self.Byte(0x4c, 0x8d, 0x0d)                                     // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string_twice")
    self.Link("_noescape_{n}")                                      // _noescape_{n}:
    self.Emit("BTQ"  , jit.Imm(_F_copy_string), _ARG_fv)    
    self.Sjmp("JNC", "_unquote_twice_write_{n}") 
    self.Byte(0x4c, 0x8d, 0x0d)                                     // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_unquote_twice_write_{n}")
    self.Emit("MOVQ" , _SI, n)                                      // MOVQ   SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p) 
    } else {
        self.WriteRecNotAX(12, _DI, p, false, false)
    }
}
930

931
/** Memory Clearing Routines **/
932

933
// Runtime memory-clearing helpers, selected by mem_clear_fn depending on
// whether the cleared region may contain pointers.
var (
    _F_memclrHasPointers    = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)
937

938
// mem_clear_fn emits a call to the appropriate runtime memclr variant:
// memclrNoHeapPointers when the region is pointer-free, memclrHasPointers
// otherwise.
func (self *_Assembler) mem_clear_fn(ptrfree bool) {
    fn := _F_memclrHasPointers
    if ptrfree {
        fn = _F_memclrNoHeapPointers
    }
    self.call_go(fn)
}
945

946
// mem_clear_rem clears the remainder of the current value: it computes the
// byte count from VP up to the saved end pointer on top of the decoder stack
// plus size, then calls the matching memclr variant (see mem_clear_fn).
func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _CX)               // MOVQ    ${size}, CX
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ    (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)     // MOVQ    (ST)(AX), AX
    self.Emit("SUBQ", _VP, _AX)                         // SUBQ    VP, AX
    self.Emit("ADDQ", _AX, _CX)                         // ADDQ    AX, CX
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0))             // MOVQ    VP, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))             // MOVQ    CX, 8(SP)
    self.mem_clear_fn(ptrfree)                          // CALL_GO memclr{Has,NoHeap}Pointers
}
956

957
/** Map Assigning Routines **/
958

959
// Runtime map-assignment helpers (generic and fast-path variants).
var (
    _F_mapassign           = jit.Func(mapassign)
    _F_mapassign_fast32    = jit.Func(mapassign_fast32)
    _F_mapassign_faststr   = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

// Unmarshaler dispatch helpers; resolved in init because jit.Func cannot be
// evaluated on these decoder-package functions at package-variable init order.
var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)

func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
975

976
// mapaccess_ptr dereferences the element slot returned by a map assignment
// when the map stores its elements indirectly (pointer to element).
func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
        self.vfollow(t.Elem())
    }
}
981

982
// mapassign_std assigns into map t using the generic runtime mapassign, with
// the key taken by address from operand v (loaded into AX for mapassign_call).
func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX)               // LEAQ      ${v}, AX
    self.mapassign_call(t, _F_mapassign)    // MAPASSIGN ${t}, mapassign
}
986

987
// mapassign_str_fast assigns into a string-keyed map via the runtime's
// mapassign_faststr fast path: args are (maptype, map VP, key ptr p, key len n);
// the returned element slot replaces VP, dereferenced if stored indirectly.
func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX)         // MOVQ    ${t}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ    AX, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ    VP, 8(SP)
    self.Emit("MOVQ", p, jit.Ptr(_SP, 16))      // MOVQ    ${p}, 16(SP)
    self.Emit("MOVQ", n, jit.Ptr(_SP, 24))      // MOVQ    ${n}, 24(SP)
    self.call_go(_F_mapassign_faststr)          // CALL_GO mapassign_faststr
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP)    // MOVQ    32(SP), VP
    self.mapaccess_ptr(t)
}
997

998
// mapassign_call calls map-assignment function fn with (maptype, map VP,
// key in AX) and replaces VP with the returned element slot.
func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _SI)         // MOVQ    ${t}, SI
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0))     // MOVQ    SI, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ    VP, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
    self.call_go(fn)                            // CALL_GO ${fn}
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP)    // MOVQ    24(SP), VP
}
1006

1007
// mapassign_fastx performs a fast-path map assignment via fn and then follows
// the element pointer when the map stores elements indirectly.
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call(t, fn)
    self.mapaccess_ptr(t)
}
1011

1012
// mapassign_utext decodes a map key whose type implements
// encoding.TextUnmarshaler: it allocates a key value, invokes
// decodeTextUnmarshaler on the string saved in sv, then assigns into the map.
// addressable indicates the key type has a pointer receiver on an addressable
// value, in which case the unmarshaler is called through *K while the map key
// remains K; otherwise a pointer key uses the fast64ptr assignment path.
func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    /* deref pointer if needed */
    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    /* addressable value with pointer receiver */
    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    /* allocate the key, and call the unmarshaler */
    self.valloc(vk, _DI)                        // VALLOC  ${vk}, DI
    // must spill vk pointer since next call_go may invoke GC
    self.Emit("MOVQ" , _DI, _VAR_vk)
    self.Emit("MOVQ" , jit.Type(tk), _AX)       // MOVQ    ${tk}, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ    AX, (SP)
    self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8))    // MOVQ    DI, 8(SP)
    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))   // MOVOU   X0, 16(SP)
    self.call_go(_F_decodeTextUnmarshaler)      // CALL_GO decodeTextUnmarshaler
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ    32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ    40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
    self.Emit("MOVQ" , _VAR_vk, _AX)            // reload the key pointer spilled above

    /* select the correct assignment function */
    if !pv {
        self.mapassign_call(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}
1052

1053
/** External Unmarshaler Routines **/
1054

1055
// Native skip subroutines used to step over unparsed values.
var (
    _F_skip_one = jit.Imm(int64(native.S_skip_one))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)
1059

1060
// unmarshal_json skips over the next JSON value, captures its raw text into
// sv (sv.p / sv.n), and invokes the type's json.Unmarshaler via
// decodeJsonUnmarshaler. deref selects auto-allocation of pointer receivers
// (see unmarshal_func).
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one)                                   // CALL_SF   skip_one
    self.Emit("TESTQ", _AX, _AX)                                // TESTQ     AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)                     // JS        _parsing_error_v
    self.slice_from_r(_AX, 0)                                   // SLICE_R   AX, $0
    self.Emit("MOVQ" , _DI, _VAR_sv_p)                          // MOVQ      DI, sv.p
    self.Emit("MOVQ" , _SI, _VAR_sv_n)                          // MOVQ      SI, sv.n
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)     // UNMARSHAL json, ${t}, ${deref}
}
1069

1070
// unmarshal_text parses a JSON string, unquotes it into sv (sv.p / sv.n),
// and invokes the type's encoding.TextUnmarshaler via decodeTextUnmarshaler.
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string()                                         // PARSE     STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)        // UNQUOTE   once, sv.p, sv.n
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)     // UNMARSHAL text, ${t}, ${deref}
}
1075

1076
// unmarshal_func calls unmarshaler dispatch function fn with
// (type t, value pointer, sv string). When deref is set and t is a pointer
// type, a nil pointee is allocated first and the unmarshaler receives the
// pointee address instead of VP. A non-nil error from fn aborts via _error.
func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    /* allocate the field if needed */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ" , _VP, _AX)                // MOVQ   VP, AX
        self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX)    // MOVQ   (AX), AX
        self.Emit("TESTQ", _AX, _AX)                // TESTQ  AX, AX
        self.Sjmp("JNZ"  , "_deref_{n}")            // JNZ    _deref_{n}
        self.valloc(t.Elem(), _AX)                  // VALLOC ${t.Elem()}, AX
        self.WritePtrAX(3, jit.Ptr(_VP, 0), false)    // MOVQ   AX, (VP)
        self.Link("_deref_{n}")                     // _deref_{n}:
    }

    /* set value type */
    self.Emit("MOVQ", jit.Type(pt), _CX)        // MOVQ ${pt}, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0))     // MOVQ CX, (SP)

    /* set value pointer */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)
    } else {
        self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ VP, 8(SP)
    }

    /* set the source string and call the unmarshaler */
    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))   // MOVOU   X0, 16(SP)
    self.call_go(fn)                            // CALL_GO ${fn}
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ    32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ    40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
}
1111

1112
/** Dynamic Decoding Routine **/
1113

1114
// Dynamic decoding entry point; resolved in init (see the note on the
// unmarshaler helpers above).
var (
    _F_decodeTypedPointer obj.Addr
)

func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}
1121

1122
// decode_dynamic calls decodeTypedPointer(s, ic, vt, vp, sb, fv) to decode a
// value whose type is only known at run time (interface targets), then
// restores the input cursor from the call's return. A MismatchTypeError is
// deferred (recorded in VAR_ic / VAR_et) so decoding can continue; any other
// error aborts via _error.
func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ" , _ARG_fv, _CX)            // MOVQ    fv, CX
    self.Emit("MOVOU", _ARG_sp, _X0)            // MOVOU   sp, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))    // MOVOU   X0, (SP)
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))   // MOVQ    IC, 16(SP)
    self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24))    // MOVQ    ${vt}, 24(SP)
    self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32))    // MOVQ    ${vp}, 32(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40))   // MOVQ    ST, 40(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48))   // MOVQ    CX, 48(SP)
    self.call_go(_F_decodeTypedPointer)         // CALL_GO decodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET)   // MOVQ    64(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP)   // MOVQ    72(SP), EP
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC)   // MOVQ    56(SP), IC
    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
    self.Sjmp("JE", "_decode_dynamic_end_{n}")  // JE      _decode_dynamic_end_{n}
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ",  _ET, _AX)                // CMPQ ET, AX
    self.Sjmp("JNE" , _LB_error)                // JNE  _error
    self.Emit("MOVQ", _EP, _VAR_ic)             // MOVQ EP, VAR_ic
    self.Emit("MOVQ", _ET, _VAR_et)             // MOVQ ET, VAR_et
    self.Link("_decode_dynamic_end_{n}")
}
1145

1146
/** OpCode Assembler Functions **/
1147

1148
// Runtime helpers used by the opcode assembler functions below.
var (
    _F_memequal         = jit.Func(memequal)
    _F_memmove          = jit.Func(memmove)
    _F_growslice        = jit.Func(growslice)
    _F_makeslice        = jit.Func(makeslice)
    _F_makemap_small    = jit.Func(makemap_small)
    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

// Native whitespace-skip and string-hash subroutines.
var (
    _F_lspace  = jit.Imm(int64(native.S_lspace))
    _F_strhash = jit.Imm(int64(caching.S_strhash))
)

// Assembly subroutines for base64 decoding and generic value decoding.
var (
    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

// Native skip subroutines for compound values.
var (
    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
    _F_skip_object = jit.Imm(int64(native.S_skip_object))
)

var (
    _F_FieldMap_GetCaseInsensitive obj.Addr
    // _Zero_Base is the base pointer of an empty slice, used as a non-nil
    // zero-length backing pointer.
    _Empty_Slice = make([]byte, 0)
    _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
    _MODE_AVX2 = 1 << 2
)

// Offsets into caching.FieldEntry, for the field-lookup code.
const (
    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

// Pointer-kind constant and GoType kind-flags offset, for run-time kind checks.
const (
    _Vk_Ptr       = int64(reflect.Ptr)
    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)

func init() {
    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
1196

1197
// _asm_OP_any decodes into an interface{} slot. If the interface already
// holds a non-self pointer value of pointer kind, decoding recurses into that
// pointee via decode_dynamic; otherwise the generic decodeValue subroutine
// produces a fresh value.
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
    self.Emit("MOVQ"   , jit.Ptr(_VP, 8), _CX)              // MOVQ    8(VP), CX
    self.Emit("TESTQ"  , _CX, _CX)                          // TESTQ   CX, CX
    self.Sjmp("JZ"     , "_decode_{n}")                     // JZ      _decode_{n}
    self.Emit("CMPQ"   , _CX, _VP)                          // CMPQ    CX, VP: self-pointing iface data cannot be reused
    self.Sjmp("JE"     , "_decode_{n}")                     // JE      _decode_{n}
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
    self.Sjmp("JNE"    , "_decode_{n}")                     // JNE     _decode_{n}
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
    self.Sjmp("JMP"    , "_decode_end_{n}")                 // JMP     _decode_end_{n}
    self.Link("_decode_{n}")                                // _decode_{n}:
    self.Emit("MOVQ"   , _ARG_fv, _DF)                      // MOVQ    fv, DF
    self.Emit("MOVQ"   , _ST, jit.Ptr(_SP, 0))              // MOVQ    _ST, (SP)
    self.call(_F_decodeValue)                               // CALL    decodeValue
    self.Emit("TESTQ"  , _EP, _EP)                          // TESTQ   EP, EP
    self.Sjmp("JNZ"    , _LB_parsing_error)                 // JNZ     _parsing_error
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}
1219

1220
// _asm_OP_dyn decodes into a non-empty interface: the stored value must
// already be non-nil and of pointer kind (otherwise a type error is raised),
// and decoding recurses into its pointee via decode_dynamic.
func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
    self.Emit("MOVQ"   , jit.Type(p.vt()), _ET)             // MOVQ    ${p.vt()}, ET
    self.Emit("CMPQ"   , jit.Ptr(_VP, 8), jit.Imm(0))       // CMPQ    8(VP), $0
    self.Sjmp("JE"     , _LB_type_error)                    // JE      _type_error
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
    self.Emit("MOVQ"   , jit.Ptr(_AX, 8), _AX)              // MOVQ    8(AX), AX: concrete type from the itab
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
    self.Sjmp("JNE"    , _LB_type_error)                    // JNE     _type_error
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}
1234

1235
// _asm_OP_str decodes a JSON string into a Go string header at VP
// ((VP) = pointer, 8(VP) = length).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.parse_string()                                     // PARSE   STRING
    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)     // UNQUOTE once, (VP), 8(VP)
}
1239

1240
// _asm_OP_bin decodes a base64-encoded JSON string into a []byte at VP:
// it captures the raw string, allocates a buffer of 3*(len/4) bytes (the
// decoded upper bound), swaps it into the slice header with a write barrier,
// and calls the b64decode subroutine; the actual decoded length is stored
// into 8(VP).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.parse_string()                                 // PARSE  STRING
    self.slice_from(_VAR_st_Iv, -1)                     // SLICE  st.Iv, $-1
    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))            // MOVQ   DI, (VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))            // MOVQ   SI, 8(VP)
    self.Emit("SHRQ" , jit.Imm(2), _SI)                 // SHRQ   $2, SI
    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)    // LEAQ   (SI)(SI*2), SI
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ   SI, 16(VP)
    self.malloc(_SI, _SI)                               // MALLOC SI, SI

    // TODO: due to base64x's bug, only use AVX mode now
    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)          //  MOVL $_MODE_JSON, CX

    /* call the decoder */
    self.Emit("XORL" , _DX, _DX)                // XORL  DX, DX
    self.Emit("MOVQ" , _VP, _DI)                // MOVQ  VP, DI

    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9)    // MOVQ (VP), R9: save the raw-string pointer
    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)    // store the new buffer into (VP) with a write barrier
    self.Emit("MOVQ" , _R9, _SI)

    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))    // XCHGQ DX, 8(VP): swap raw length out, zero length in
    self.call(_F_b64decode)                     // CALL  b64decode
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JS"   , _LB_base64_error)        // JS    _base64_error
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ  AX, 8(VP): decoded length
}
1267

1268
// _asm_OP_bool decodes a JSON boolean into the byte at (VP): it matches the
// 4-byte immediate "true" or 'f' + "alse". A non-boolean token records a
// mismatch error of type bool and skips the value so decoding can continue.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                     // LEAQ 4(IC), AX
    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))    // CMPB (IP)(IC), $'f'
    self.Sjmp("JE"  , "_false_{n}")                             // JE   _false_{n}
    self.Emit("MOVL", jit.Imm(_IM_true), _CX)                   // MOVL $"true", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
    self.Sjmp("JE" , "_bool_true_{n}")  

    // try to skip the value
    self.Emit("MOVQ", _IC, _VAR_ic)           
    self.Emit("MOVQ", _T_bool, _ET)         
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
    self.Sref("_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)     // resume at _end_{n} after the skip
    self.Sjmp("JMP"  , _LB_skip_one) 

    self.Link("_bool_true_{n}")
    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))              // MOVB $1, (VP)
    self.Sjmp("JMP" , "_end_{n}")                               // JMP  _end_{n}
    self.Link("_false_{n}")                                     // _false_{n}:
    self.Emit("ADDQ", jit.Imm(1), _AX)                          // ADDQ $1, AX: "false" is one byte longer
    self.Emit("ADDQ", jit.Imm(1), _IC)                          // ADDQ $1, IC
    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)                   // MOVL $"alse", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
    self.Sjmp("JNE" , _LB_im_error)                             // JNE  _im_error
    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                     // MOVB AX, (VP)
    self.Link("_end_{n}")                                       // _end_{n}:
}
1304

1305
// _asm_OP_num decodes a json.Number at VP: it accepts either a bare number or
// a quoted number (VAR_fl records which), captures the raw digits as a string
// (optionally copied out of the input buffer), and for the quoted form
// verifies and consumes the closing quote. A malformed number records a
// mismatch error of type json.Number and skips the value.
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Emit("MOVQ", _IC, _BP)                 // save the value start for error reporting / skipping
    self.Sjmp("JNE", "_skip_number_{n}")
    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)      // fl = 1: quoted number
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_skip_number_{n}")

    /* call skip_number */
    self.call_sf(_F_skip_number)                // CALL_SF skip_number
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JNS"   , "_num_next_{n}")

    /* call skip one */
    self.Emit("MOVQ", _BP, _VAR_ic)           
    self.Emit("MOVQ", _T_number, _ET)       
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
    self.Sref("_num_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)     // resume at _num_end_{n} after the skip
    self.Sjmp("JMP"  , _LB_skip_one)

    /* assign string */
    self.Link("_num_next_{n}")
    self.slice_from_r(_AX, 0)
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_num_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)                 // LEAQ (PC), R9
    self.Sref("_num_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_num_write_{n}")
    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))     // MOVQ  SI, 8(VP)
    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)   
    
    /* check if quoted */
    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
    self.Sjmp("JNE", "_num_end_{n}")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Sjmp("JNE", _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_num_end_{n}")
}
1348

1349
// _asm_OP_i8 parses a signed integer, range-checks it against int8 bounds,
// and stores the low byte at (VP). pin is the skip-to label used on a
// mismatched-type fallback inside parse_signed.
func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
    var pin = "_i8_end_{n}"
    self.parse_signed(int8Type, pin, -1)                                // PARSE int8
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE int8
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                             // MOVB  AX, (VP)
    self.Link(pin)
}
1356

1357
// _asm_OP_i16 parses a signed integer, range-checks it against int16 bounds,
// and stores the low word at (VP).
func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
    var pin = "_i16_end_{n}"
    self.parse_signed(int16Type, pin, -1)                                   // PARSE int16
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE int16
    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                                 // MOVW  AX, (VP)
    self.Link(pin)
}
1364

1365
// _asm_OP_i32 parses a signed integer, range-checks it against int32 bounds,
// and stores the low dword at (VP).
func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
    var pin = "_i32_end_{n}"
    self.parse_signed(int32Type, pin, -1)                                   // PARSE int32
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE int32
    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                                 // MOVL  AX, (VP)
    self.Link(pin)
}
1372

1373
// _asm_OP_i64 parses a signed integer and stores the full 64-bit value at
// (VP). No range check is needed: st.Iv is already an int64.
func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
    var pin = "_i64_end_{n}"
    self.parse_signed(int64Type, pin, -1)       // PARSE int64
    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
    self.Link(pin)
}
1380

1381
// _asm_OP_u8 parses an unsigned integer, range-checks it against uint8
// bounds, and stores the low byte at (VP).
func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
    var pin = "_u8_end_{n}"
    self.parse_unsigned(uint8Type, pin, -1)                 // PARSE uint8
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE uint8
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                 // MOVB  AX, (VP)
    self.Link(pin)
}
1388

1389
// _asm_OP_u16 parses an unsigned integer, range-checks it against uint16
// bounds, and stores the low word at (VP).
func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
    var pin = "_u16_end_{n}"
    self.parse_unsigned(uint16Type, pin, -1)                    // PARSE uint16
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE uint16
    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                     // MOVW  AX, (VP)
    self.Link(pin)
}
1396

1397
// _asm_OP_u32 parses an unsigned integer, range-checks it against uint32
// bounds, and stores the low dword at (VP).
func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
    var pin = "_u32_end_{n}"
    self.parse_unsigned(uint32Type, pin, -1)                    // PARSE uint32
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE uint32
    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                     // MOVL  AX, (VP)
    self.Link(pin)
}
1404

1405
// _asm_OP_u64 parses an unsigned integer and stores the full 64-bit value
// at (VP). No range check is needed: st.Iv already holds the uint64 bits.
func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
    var pin = "_u64_end_{n}"
    self.parse_unsigned(uint64Type, pin, -1)    // PARSE uint64
    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
    self.Link(pin)
}
1412

1413
// _asm_OP_f32 parses a floating-point number, range-checks it for float32
// representability, and stores the single-precision value at (VP).
func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
    var pin = "_f32_end_{n}"
    self.parse_number(float32Type, pin, -1)     // PARSE NUMBER
    self.range_single()                         // RANGE float32
    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))    // MOVSS X0, (VP)
    self.Link(pin)
}
1420

1421
// _asm_OP_f64 parses a floating-point number and stores the double-precision
// value at (VP). No range check: st.Dv is already a float64.
func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
    var pin = "_f64_end_{n}"
    self.parse_number(float64Type, pin, -1)     // PARSE NUMBER
    self.Emit("MOVSD", _VAR_st_Dv, _X0)         // MOVSD st.Dv, X0
    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))    // MOVSD X0, (VP)
    self.Link(pin)
}
1428

1429
// _asm_OP_unquote decodes a doubly-quoted string (`\"...\"` inside a JSON
// string): it requires the next two characters to be `\` and `"`, then
// parses and unquotes the contents twice into the string header at (VP).
func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
    self.check_eof(2)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB    (IP)(IC), $'\\'
    self.Sjmp("JNE" , _LB_char_0_error)                         // JNE     _char_0_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB    1(IP)(IC), $'"'
    self.Sjmp("JNE" , _LB_char_1_error)                         // JNE     _char_1_error
    self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ    $2, IC
    self.parse_string()                                         // PARSE   STRING
    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
}
1439

1440
// _asm_OP_nil_1 zeroes one machine word at (VP) (e.g. a pointer or map).
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
    self.Emit("XORL", _AX, _AX)                 // XORL AX, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ AX, (VP)
}
1444

1445
// _asm_OP_nil_2 zeroes two machine words at (VP) (e.g. a string or
// interface header).
func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
}
1449

1450
// _asm_OP_nil_3 zeroes three machine words at (VP) (a slice header):
// 16 bytes via SSE plus one extra word.
func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
    self.Emit("XORL" , _AX, _AX)                // XORL  AX, AX
    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))   // MOVQ  AX, 16(VP)
}
1456

1457
// _asm_OP_deref follows the pointer at (VP), allocating the pointee of type
// p.vt() if the pointer is nil (see vfollow).
func (self *_Assembler) _asm_OP_deref(p *_Instr) {
    self.vfollow(p.vt())
}
1460

1461
// _asm_OP_index advances VP by the constant byte offset p.i64()
// (e.g. a struct field or array element offset).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ ${p.vi()}, AX
    self.Emit("ADDQ", _AX, _VP)                 // ADDQ _AX, _VP
}
1465

1466
// _asm_OP_is_null tests whether the next 4 input bytes are "null"; if so it
// consumes them and branches to instruction p.vi(). The bounds check uses
// JA (not JAE) so a literal ending exactly at EOF is still matched.
func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
    self.Emit("LEAQ"   , jit.Ptr(_IC, 4), _AX)                          // LEAQ    4(IC), AX
    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
    self.Sjmp("JA"     , "_not_null_{n}")                               // JA      _not_null_{n}
    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
    self.Link("_not_null_{n}")                                          // _not_null_{n}:
}
1475

1476
// _asm_OP_is_null_quote tests for the 5-byte sequence `null"` (a null inside
// a quoted value); if matched it consumes it and branches to p.vi().
func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
    self.Emit("LEAQ"   , jit.Ptr(_IC, 5), _AX)                          // LEAQ    5(IC), AX
    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
    self.Sjmp("JA"     , "_not_null_quote_{n}")                         // JA      _not_null_quote_{n}
    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
    self.Sjmp("JNE"    , "_not_null_quote_{n}")                         // JNE     _not_null_quote_{n}
    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))         // CMPB    4(IP)(IC), $'"'
    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
    self.Link("_not_null_quote_{n}")                                    // _not_null_quote_{n}:
}
1487

1488
// _asm_OP_map_init ensures the map at (VP) is non-nil, calling
// runtime.makemap_small when needed, then points VP at the map header
// (hmap) itself for subsequent key-assign opcodes.
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ     _end_{n}
    self.call_go(_F_makemap_small)              // CALL_GO makemap_small
    self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX)    // MOVQ    (SP), AX
    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)  // MOVQ    AX, (VP), with write barrier
    self.Link("_end_{n}")                       // _end_{n}:
    self.Emit("MOVQ" , _AX, _VP)                // MOVQ    AX, VP
}
1498

1499
// _asm_OP_map_key_i8 parses a quoted int8 map key, checks its range and the
// closing quote, then assigns it via the generic runtime.mapassign path.
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi())                             // PARSE     int8
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE     int8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                              // MAPASSIGN int8, mapassign, st.Iv
}
1505

1506
// _asm_OP_map_key_i16 parses a quoted int16 map key, checks its range and
// the closing quote, then assigns it via the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi())                                // PARSE     int16
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE     int16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                                  // MAPASSIGN int16, mapassign, st.Iv
}
1512

1513
// _asm_OP_map_key_i32 parses a quoted int32 map key and assigns it, using
// runtime.mapassign_fast32 when the map's key/elem layout allows it.
func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi())                                // PARSE     int32
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE     int32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                                  // MAPASSIGN int32, mapassign, st.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)                       // MAPASSIGN int32, mapassign_fast32
    }
}
1523

1524
// _asm_OP_map_key_i64 parses a quoted int64 map key and assigns it, using
// runtime.mapassign_fast64 when the map's key/elem layout allows it.
// No range check: st.Iv is already an int64.
func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi())            // PARSE     int64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)              // MAPASSIGN int64, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)              // MOVQ      st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)   // MAPASSIGN int64, mapassign_fast64
    }
}
1534

1535
// _asm_OP_map_key_u8 parses a quoted uint8 map key, checks its range and
// the closing quote, then assigns it via the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi())              // PARSE     uint8
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE     uint8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                  // MAPASSIGN uint8, vt.Iv
}
1541

1542
// _asm_OP_map_key_u16 parses a quoted uint16 map key, checks its range and
// the closing quote, then assigns it via the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi())                 // PARSE     uint16
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE     uint16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)                      // MAPASSIGN uint16, vt.Iv
}
1548

1549
// _asm_OP_map_key_u32 parses a quoted uint32 map key and assigns it, using
// runtime.mapassign_fast32 when the map's key/elem layout allows it.
func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi())                 // PARSE     uint32
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE     uint32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint32, vt.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)           // MAPASSIGN uint32, mapassign_fast32
    }
}
1559

1560
// _asm_OP_map_key_u64 parses a quoted uint64 map key and assigns it, using
// runtime.mapassign_fast64 when the map's key/elem layout allows it.
// No range check: st.Iv already holds the uint64 bits.
func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi())                 // PARSE     uint64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint64, vt.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)                      // MOVQ      st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)           // MAPASSIGN uint64, mapassign_fast64
    }
}
1570

1571
// _asm_OP_map_key_f32 parses a quoted float32 map key, range-checks it,
// stores the single-precision value back into st.Dv, and assigns it via
// the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi())  // PARSE     NUMBER
    self.range_single()                         // RANGE     float32
    self.Emit("MOVSS", _X0, _VAR_st_Dv)         // MOVSS     X0, st.Dv
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)      // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
1578

1579
// _asm_OP_map_key_f64 parses a quoted float64 map key and assigns it via
// the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi())  // PARSE     NUMBER
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)      // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
1584

1585
// _asm_OP_map_key_str parses and unquotes a string map key (sv.p/sv.n),
// then assigns it: via runtime.mapassign_faststr when possible, otherwise
// by materializing the key in freshly allocated memory and using the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string()                                 // PARSE     STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)                      // allocate a heap copy of the key string header
        self.Emit("MOVOU", _VAR_sv, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))
    } else {
        self.Emit("MOVQ", _VAR_sv_p, _DI)               // MOVQ      sv.p, DI
        self.Emit("MOVQ", _VAR_sv_n, _SI)               // MOVQ      sv.n, SI
        self.mapassign_str_fast(vt, _DI, _SI)           // MAPASSIGN string, DI, SI
    }
}
1599

1600
// _asm_OP_map_key_utext parses a string map key and hands it to the key
// type's encoding.TextUnmarshaler (value receiver form).
func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string()                                     // PARSE     STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
    self.mapassign_utext(p.vt(), false)                     // MAPASSIGN utext, ${p.vt()}, false
}
1605

1606
// _asm_OP_map_key_utext_p parses a string map key and hands it to the key
// type's encoding.TextUnmarshaler (pointer receiver form).
func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string()                                     // PARSE     STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)    // UNQUOTE   once, sv.p, sv.n
    self.mapassign_utext(p.vt(), true)                      // MAPASSIGN utext, ${p.vt()}, true
}
1611

1612
// _asm_OP_array_skip skips one JSON array with the native skip_array,
// branching to the parsing-error handler on failure (negative AX).
func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array)                 // CALL_SF skip_array
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
}
1617

1618
// _asm_OP_array_clear zeroes the remaining p.i64() bytes of an array
// (element type contains pointers: write-barrier-aware clear).
func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}
1621

1622
// _asm_OP_array_clear_p zeroes the remaining p.i64() bytes of an array
// (pointer-free element type: plain memory clear).
func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}
1625

1626
// _asm_OP_slice_init resets the slice at (VP) to length 0, and if its
// capacity is 0 allocates a fresh backing array of _MinSlice elements via
// runtime.makeslice. AX is 0 on the makeslice path's cap argument because
// it was just tested as zero.
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)   -- len = 0
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)       // MOVQ    16(VP), AX  -- AX = cap
    self.Emit("TESTQ", _AX, _AX)                    // TESTQ   AX, AX
    self.Sjmp("JNZ"  , "_done_{n}")                 // JNZ     _done_{n}   -- keep existing backing array
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)     // MOVQ    ${_MinSlice}, CX
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))       // MOVQ    CX, 16(VP)  -- cap = _MinSlice
    self.Emit("MOVQ" , jit.Type(p.vt()), _DX)       // MOVQ    ${p.vt()}, DX
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0))        // MOVQ    DX, (SP)    -- makeslice(et, len=0, cap=_MinSlice)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))        // MOVQ    AX, 8(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))       // MOVQ    CX, 16(SP)
    self.call_go(_F_makeslice)                      // CALL_GO makeslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)       // MOVQ    24(SP), AX
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)      // MOVQ    AX, (VP), with write barrier
    self.Link("_done_{n}")                          // _done_{n}:
    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)   -- len = 0 again (makeslice path clobbered nothing, kept for safety)
}
1645

1646
// _asm_OP_check_empty fast-paths an empty JSON array: if the next character
// is ']' it writes an empty (non-nil, zero-base) slice header at VP and
// jumps to instruction p.vi(). Only ']' is supported; any other bracket
// byte is a programming error in the compiler.
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB    (IP)(IC), ']'
        self.Sjmp("JNE" , "_not_empty_array_{n}")                            // JNE     _not_empty_array_{n}
        self.Emit("MOVQ", _AX, _IC)                                          // MOVQ    AX, IC -- consume the ']'
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)                          // non-nil sentinel base pointer
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR" , _X0, _X0)                                         // PXOR    X0, X0
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))                             // MOVOU   X0, 8(VP) -- len = cap = 0
        self.Xjmp("JMP" , p.vi())                                            // JMP     {p.vi()}
        self.Link("_not_empty_array_{n}")
    } else {
        panic("only implement check empty array here!")
    }
}
1664

1665
// _asm_OP_slice_append makes room for one more element in the slice at VP:
// when len == cap it doubles capacity via runtime.growslice, then bumps the
// length and points VP at the new element's slot (base + len*elemsize).
// For pointer-free element types it additionally zeroes the region between
// the new length and the new capacity, because growslice does not, and
// stale bytes there would otherwise be decoded as garbage values.
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)            // MOVQ    8(VP), AX -- AX = len
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))           // CMPQ    AX, 16(VP)
    self.Sjmp("JB"   , "_index_{n}")                    // JB      _index_{n} -- room available
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)           // MOVQ    ${p.vt()}, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))            // MOVQ    AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0)            // MOVOU   (VP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))            // MOVOU   X0, 8(SP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)           // MOVQ    16(VP), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24))           // MOVQ    AX, 24(SP)
    self.Emit("SHLQ" , jit.Imm(1), _AX)                 // SHLQ    $1, AX -- newcap = 2 * cap
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))           // MOVQ    AX, 32(SP)
    self.call_go(_F_growslice)                          // CALL_GO growslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI)           // MOVQ    40(SP), DI -- new base
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX)           // MOVQ    48(SP), AX -- new len
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI)           // MOVQ    56(SP), SI -- new cap
    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true) // MOVQ DI, (VP), with write barrier
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))            // MOVQ    AX, 8(VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ    SI, 16(VP)

    // because growslice not zero memory {oldcap, newlen} when append et not has ptrdata.
    // but we should zero it, avoid decode it as random values.
    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("SUBQ" , _AX, _SI)                        // SI = cap - len (elements to clear)

        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP) -- len++
        self.Emit("MOVQ" , _DI, _VP)                        // MOVQ    DI, VP
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
        self.From("MULQ" , _CX)                             // MULQ    CX -- AX = len * elemsize
        self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP -- VP = &base[len]

        self.Emit("MOVQ" , _SI, _AX)                        // MOVQ    SI, AX
        self.From("MULQ" , _CX)                             // MULQ    CX -- AX = bytes to clear
        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))            // MOVQ    AX, 8(SP)

        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0))            // MOVQ    VP, (SP)
        self.mem_clear_fn(true)                             // CALL_GO memclr{Has,NoHeap}Pointers
        self.Sjmp("JMP", "_append_slice_end_{n}")           // JMP    _append_slice_end_{n}
    }

    self.Link("_index_{n}")                             // _index_{n}:
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP) -- len++
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)            // MOVQ    (VP), VP
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
    self.From("MULQ" , _CX)                             // MULQ    CX -- AX = len * elemsize
    self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP -- VP = &base[len]
    self.Link("_append_slice_end_{n}")
}
1713

1714
// _asm_OP_object_skip skips one JSON object with the native skip_object,
// branching to the parsing-error handler on failure (negative AX).
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object)                // CALL_SF skip_object
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
}
1719

1720
// _asm_OP_object_next skips one JSON value (an unwanted object member)
// with the native skip_one, branching to the error handler on failure.
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
}
1725

1726
// _asm_OP_struct_field resolves a struct field name to its index.
// It parses and unquotes the key, hashes it with runtime strhash, then
// probes the precomputed open-addressing field table (linear probing via
// `hash % N`) comparing first the hash, then the name length, then the
// bytes (runtime memequal). On a miss it falls back to the slower
// case-insensitive lookup (FieldMap.GetCaseInsensitive); if that also
// misses and the disable_unknown flag is set, it raises a field error.
// The resulting field index (or -1 for unknown) is left in _VAR_sr.
// AX/CX/SI/R8/R9 are spilled around the memequal call because call_go
// clobbers all registers.
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX)                        // MOVQ    $-1, AX
    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, sr -- default: unknown field
    self.parse_string()                                         // PARSE   STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)        // UNQUOTE once, sv.p, sv.n
    self.Emit("LEAQ" , _VAR_sv, _AX)                            // LEAQ    sv, AX
    self.Emit("XORL" , _CX, _CX)                                // XORL    CX, CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))                    // MOVQ    CX, 8(SP) -- seed 0
    self.call_go(_F_strhash)                                    // CALL_GO strhash
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX)                   // MOVQ    16(SP), AX
    self.Emit("MOVQ" , _AX, _R9)                                // MOVQ    AX, R9 -- R9 = hash(key)
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)      // MOVQ    ${p.vf()}, CX
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)   // MOVQ    FieldMap.b(CX), SI -- bucket array
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)   // MOVQ    FieldMap.N(CX), CX -- table size
    self.Emit("TESTQ", _CX, _CX)                                // TESTQ   CX, CX
    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
    self.Link("_loop_{n}")                                      // _loop_{n}: probe next slot
    self.Emit("XORL" , _DX, _DX)                                // XORL    DX, DX
    self.From("DIVQ" , _CX)                                     // DIVQ    CX -- DX = AX % N
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)                    // LEAQ    1(DX), AX -- next probe index
    self.Emit("SHLQ" , jit.Imm(5), _DX)                         // SHLQ    $5, DX -- slot * sizeof(FieldEntry)
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)            // LEAQ    (SI)(DX), DI
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)             // MOVQ    FieldEntry.Hash(DI), R8
    self.Emit("TESTQ", _R8, _R8)                                // TESTQ   R8, R8
    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n} -- empty slot: miss
    self.Emit("CMPQ" , _R8, _R9)                                // CMPQ    R8, R9
    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX -- name length
    self.Emit("CMPQ" , _DX, _VAR_sv_n)                          // CMPQ    DX, sv.n
    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)               // MOVQ    FieldEntry.ID(DI), R8
    self.Emit("MOVQ" , _AX, _VAR_ss_AX)                         // spill probe state around call_go
    self.Emit("MOVQ" , _CX, _VAR_ss_CX)
    self.Emit("MOVQ" , _SI, _VAR_ss_SI)
    self.Emit("MOVQ" , _R8, _VAR_ss_R8)
    self.Emit("MOVQ" , _R9, _VAR_ss_R9)
    self.Emit("MOVQ" , _VAR_sv_p, _AX)                          // MOVQ    _VAR_sv_p, AX
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)             // MOVQ    FieldEntry.Name(DI), CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))                    // MOVQ    CX, 8(SP)
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16))                   // MOVQ    DX, 16(SP)
    self.call_go(_F_memequal)                                   // CALL_GO memequal
    self.Emit("MOVQ" , _VAR_ss_AX, _AX)                         // restore probe state
    self.Emit("MOVQ" , _VAR_ss_CX, _CX)
    self.Emit("MOVQ" , _VAR_ss_SI, _SI)
    self.Emit("MOVQ" , _VAR_ss_R9, _R9)
    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX)                   // MOVB    24(SP), DX -- memequal result
    self.Emit("TESTB", _DX, _DX)                                // TESTB   DX, DX
    self.Sjmp("JZ"   , "_loop_{n}")                             // JZ      _loop_{n} -- hash collision, keep probing
    self.Emit("MOVQ" , _VAR_ss_R8, _R8)                         // MOVQ    ss.R8, R8
    self.Emit("MOVQ" , _R8, _VAR_sr)                            // MOVQ    R8, sr -- found: record field index
    self.Sjmp("JMP"  , "_end_{n}")                              // JMP     _end_{n}
    self.Link("_try_lowercase_{n}")                             // _try_lowercase_{n}: case-insensitive fallback
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)   // MOVQ    ${p.vf()}, AX
    self.Emit("MOVOU", _VAR_sv, _X0)                            // MOVOU   sv, X0
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                    // MOVOU   X0, 8(SP)
    self.call_go(_F_FieldMap_GetCaseInsensitive)                // CALL_GO FieldMap::GetCaseInsensitive
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)                   // MOVQ    24(SP), AX
    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, _VAR_sr
    self.Emit("TESTQ", _AX, _AX)                                // TESTQ   AX, AX
    self.Sjmp("JNS"  , "_end_{n}")                              // JNS     _end_{n} -- found
    self.Emit("BTQ"  , jit.Imm(_F_disable_unknown), _ARG_fv)    // BTQ     ${_F_disable_unknown}, fv
    self.Sjmp("JC"   , _LB_field_error)                         // JC      _field_error
    self.Link("_end_{n}")                                       // _end_{n}:
}
1794

1795
// _asm_OP_unmarshal invokes the value's json.Unmarshaler (value receiver
// form, deref = true).
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
    self.unmarshal_json(p.vt(), true)
}
1798

1799
// _asm_OP_unmarshal_p invokes the value's json.Unmarshaler (pointer
// receiver form, deref = false).
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
    self.unmarshal_json(p.vt(), false)
}
1802

1803
// _asm_OP_unmarshal_text invokes the value's encoding.TextUnmarshaler
// (value receiver form, deref = true).
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
    self.unmarshal_text(p.vt(), true)
}
1806

1807
// _asm_OP_unmarshal_text_p invokes the value's encoding.TextUnmarshaler
// (pointer receiver form, deref = false).
func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
    self.unmarshal_text(p.vt(), false)
}
1810

1811
// _asm_OP_lspace skips leading JSON whitespace (see lspace below).
func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
    self.lspace("_{n}")
}
1814

1815
// lspace emits code that advances IC past JSON whitespace (' ', '\t',
// '\n', '\r', per the _BM_space bitmap). It unrolls the test for up to 4
// characters inline — the common case — and only then calls the native
// lspace routine for longer runs. Hitting end of input is an eof error:
// whitespace here must always be followed by another token.
func (self *_Assembler) lspace(subfix string) {
    var label = "_lspace" + subfix

    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)       // MOVQ    _BM_space, DX
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
    self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
    self.Sjmp("JA"     , label)                         // JA      _lspace_{n} -- byte > ' ' can't be whitespace
    self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX -- test whitespace bitmap
    self.Sjmp("JNC"    , label)                         // JNC     _lspace_{n}

    /* test up to 4 characters */
    for i := 0; i < 3; i++ {
        self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
        self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
        self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
        self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
        self.Sjmp("JA"     , label)                         // JA      _lspace_{n}
        self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
        self.Sjmp("JNC"    , label)                         // JNC     _lspace_{n}
    }

    /* handle over to the native function */
    self.Emit("MOVQ"   , _IP, _DI)                      // MOVQ    IP, DI
    self.Emit("MOVQ"   , _IL, _SI)                      // MOVQ    IL, SI
    self.Emit("MOVQ"   , _IC, _DX)                      // MOVQ    IC, DX
    self.call(_F_lspace)                                // CALL    lspace
    self.Emit("TESTQ"  , _AX, _AX)                      // TESTQ   AX, AX
    self.Sjmp("JS"     , _LB_parsing_error_v)           // JS      _parsing_error_v
    self.Emit("CMPQ"   , _AX, _IL)                      // CMPQ    AX, IL
    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
    self.Emit("MOVQ"   , _AX, _IC)                      // MOVQ    AX, IC
    self.Link(label)                                    // _lspace_{n}:
}
1851

1852
// _asm_OP_match_char requires the next input byte to be p.vb() and
// consumes it (see match_char below).
func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
    self.match_char(p.vb())
}
1855

1856
// match_char emits code that consumes the byte `char` from the input,
// branching to the char-0 error handler if the next byte differs.
func (self *_Assembler) match_char(char byte) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))  // CMPB (IP)(IC), ${p.vb()}
    self.Sjmp("JNE" , _LB_char_0_error)                               // JNE  _char_0_error
    self.Emit("ADDQ", jit.Imm(1), _IC)                                // ADDQ $1, IC
}
1862

1863
// _asm_OP_check_char branches to instruction p.vi() if the next byte is
// p.vb(), consuming it (branchless consume via CMOVQEQ).
func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
    self.check_eof(1)
    self.Emit("LEAQ"   , jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
    self.Emit("CMOVQEQ", _AX, _IC)                                          // CMOVQEQ AX, IC
    self.Xjmp("JE"     , p.vi())                                            // JE      {p.vi()}
}
1870

1871
// _asm_OP_check_char_0 branches to instruction p.vi() if the next byte is
// p.vb(), WITHOUT consuming it (unlike _asm_OP_check_char).
func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
    self.Xjmp("JE"  , p.vi())                                            // JE      {p.vi()}
}
1876

1877
// _asm_OP_add advances the input cursor by the constant p.vi().
func (self *_Assembler) _asm_OP_add(p *_Instr) {
    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)  // ADDQ ${p.vi()}, IC
}
1880

1881
// _asm_OP_load emits code that reloads VP with the value pointer on top of
// the decoder stack: (ST) holds the current byte offset, and the top slot
// lives at (ST)(offset) — i.e. 8 bytes below the next free slot.
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)     // MOVQ (ST)(AX), VP
}
1885

1886
// _asm_OP_save emits code that pushes VP onto the decoder stack: it checks
// the current byte offset against _MaxStackBytes (jumping to the stack-
// overflow error on JAE, i.e. offset >= limit), stores VP through the
// write-barrier helper, then bumps the offset by 8.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))          // CMPQ CX, ${_MaxStackBytes}
    self.Sjmp("JAE"  , _LB_stack_error)                 // JAE  _stack_error
    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
    self.Emit("ADDQ", jit.Imm(8), _CX)                  // ADDQ $8, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))             // MOVQ CX, (ST)
}
1894

1895
// _asm_OP_drop emits code that pops one slot off the decoder stack into VP,
// then zeroes the vacated slot so the GC does not see a stale pointer.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
    self.Emit("SUBQ", jit.Imm(8), _AX)                  // SUBQ $8, AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)     // MOVQ 8(ST)(AX), VP
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))             // MOVQ AX, (ST)
    self.Emit("XORL", _ET, _ET)                         // XORL ET, ET
    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8))     // MOVQ ET, 8(ST)(AX)
}
1903

1904
// _asm_OP_drop_2 emits code that pops two slots off the decoder stack at
// once, loading VP from the lower of the two, and clears both vacated slots
// with a single 16-byte SSE store (PXOR + MOVOU) for GC safety.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
    self.Emit("SUBQ" , jit.Imm(16), _AX)                // SUBQ  $16, AX
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)    // MOVQ  8(ST)(AX), VP
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
    self.Emit("PXOR" , _X0, _X0)                        // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // MOVOU X0, 8(ST)(AX)
}
1912

1913
// _asm_OP_recurse emits code that decodes the value at VP via the generic
// (dynamic) decoding path for the statically-known type p.vt(); used to
// break compile-time recursion for self-referential types.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX)    // MOVQ   ${p.vt()}, AX
    self.decode_dynamic(_AX, _VP)               // DECODE AX, VP
}
1917

1918
// _asm_OP_goto emits an unconditional jump to instruction p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
1921

1922
// _asm_OP_switch emits a computed jump on the selector stored in _VAR_sr:
// out-of-range values fall through to the default label; in-range values are
// dispatched through a PC-relative jump table of 32-bit offsets. The raw
// bytes 0x48 0x8d 0x3d encode `LEAQ ?(PC), DI` whose 4-byte displacement is
// patched by Sref to point at the table; each table entry (emitted via Xref
// with a -i*4 bias) is the target's offset relative to the table base, so
// MOVLQSX + ADDQ reconstructs an absolute address for the indirect JMP.
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX)             // MOVQ sr, AX
    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))    // CMPQ AX, ${len(p.vs())}
    self.Sjmp("JAE" , "_default_{n}")           // JAE  _default_{n}

    /* jump table selector */
    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ    ?(PC), DI
    self.Sref("_switch_table_{n}", 4)                   // ....    &_switch_table_{n}
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // MOVLQSX (DI)(AX*4), AX
    self.Emit("ADDQ"   , _DI, _AX)                      // ADDQ    DI, AX
    self.Rjmp("JMP"    , _AX)                           // JMP     AX
    self.Link("_switch_table_{n}")                      // _switch_table_{n}:

    /* generate the jump table */
    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }

    /* default case */
    self.Link("_default_{n}")
    self.NOP()
}
1944

1945
// print_gc emits a debugging aid: a call to println with the instruction
// index i and the opcodes of the two instructions around it. Arguments are
// spilled to the stack in reverse order per the go1.16 stack-based ABI.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())),  jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())),  jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)),  jit.Ptr(_SP, 0))       // MOVQ $(i), (SP)
    self.call_go(_F_println)
}
1951

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.