/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `strings`
    `unsafe`

    `github.com/bytedance/sonic/internal/resolver`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/option`
)
type _Op uint8

const (
    _OP_null _Op = iota + 1
    /* ... (remaining opcode values elided from this fragment) ... */
)

const (
    _INT_SIZE = 32 << (^uint(0) >> 63)      // 32 or 64: the native int width in bits
    _PTR_SIZE = 32 << (^uintptr(0) >> 63)   // 32 or 64: the pointer width in bits
    _PTR_BYTE = unsafe.Sizeof(uintptr(0))   // the pointer width in bytes
)

const (
    _MAX_ILBUF  = 100000    // cutoff at 100k IL instructions
    _MAX_FIELDS = 50        // cutoff at structs with 50 fields
)
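// A minimal sketch (not part of the original file) of the word-size trick
// used above: `^uint(0) >> 63` is 1 when uint is 64 bits wide and 0 when it
// is 32 bits wide, so `32 << ...` yields the native integer width in bits.
func _exampleWordSize() {
    bits := 32 << (^uint(0) >> 63)           // 64 on amd64/arm64, 32 on 386/arm
    bytes := int(unsafe.Sizeof(uintptr(0)))  // the same width, counted in bytes
    fmt.Println(bits, bytes)                 // e.g. "64 8" on a 64-bit target
}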
var _OpNames = [256]string {
    _OP_empty_arr      : "empty_arr",
    _OP_empty_obj      : "empty_obj",
    /* ... (scalar opcode names elided from this fragment) ... */
    _OP_number         : "number",
    /* ... */
    _OP_drop_2         : "drop_2",
    _OP_recurse        : "recurse",
    _OP_is_nil         : "is_nil",
    _OP_is_nil_p1      : "is_nil_p1",
    _OP_is_zero_1      : "is_zero_1",
    _OP_is_zero_2      : "is_zero_2",
    _OP_is_zero_4      : "is_zero_4",
    _OP_is_zero_8      : "is_zero_8",
    _OP_is_zero_map    : "is_zero_map",
    _OP_goto           : "goto",
    _OP_map_iter       : "map_iter",
    _OP_map_stop       : "map_stop",
    _OP_map_check_key  : "map_check_key",
    _OP_map_write_key  : "map_write_key",
    _OP_map_value_next : "map_value_next",
    _OP_slice_len      : "slice_len",
    _OP_slice_next     : "slice_next",
    _OP_marshal        : "marshal",
    _OP_marshal_p      : "marshal_p",
    _OP_marshal_text   : "marshal_text",
    _OP_marshal_text_p : "marshal_text_p",
    _OP_cond_set       : "cond_set",
    _OP_cond_testc     : "cond_testc",
}
func (self _Op) String() string {
    if ret := _OpNames[self]; ret != "" {
        return ret
    } else {
        return "<invalid>"
    }
}
func _OP_int() _Op {
    switch _INT_SIZE {
        case 32: return _OP_i32
        case 64: return _OP_i64
        default: panic("unsupported int size")
    }
}

func _OP_uint() _Op {
    switch _INT_SIZE {
        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported uint size")
    }
}
func _OP_uintptr() _Op {
    switch _PTR_SIZE {
        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported pointer size")
    }
}
func _OP_is_zero_ints() _Op {
    switch _INT_SIZE {
        case 32: return _OP_is_zero_4
        case 64: return _OP_is_zero_8
        default: panic("unsupported integer size")
    }
}
type _Instr struct {
    u uint64         // union {op: 8, _: 8, vi: 48}, vi maybe int or len(str)
    p unsafe.Pointer // maybe GoString.Ptr, or *GoType
}
func packOp(op _Op) uint64 {
    return uint64(op) << 56
}

func newInsOp(op _Op) _Instr {
    return _Instr{u: packOp(op)}
}

func newInsVi(op _Op, vi int) _Instr {
    return _Instr{u: packOp(op) | rt.PackInt(vi)}
}
func newInsVs(op _Op, vs string) _Instr {
    return _Instr {
        u: packOp(op) | rt.PackInt(len(vs)),
        p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
    }
}
func newInsVt(op _Op, vt reflect.Type) _Instr {
    return _Instr {
        u: packOp(op),
        p: unsafe.Pointer(rt.UnpackType(vt)),
    }
}
func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr {
    i := 0
    if pv {
        i = 1
    }
    return _Instr {
        u: packOp(op) | rt.PackInt(i),
        p: unsafe.Pointer(rt.UnpackType(vt)),
    }
}
func (self _Instr) op() _Op {
    return _Op(self.u >> 56)
}

func (self _Instr) vi() int {
    return rt.UnpackInt(self.u)
}
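// A minimal round-trip sketch (not part of the original file) of the 8/8/48
// encoding of `u`: the opcode lives in the top byte, and the 48-bit operand
// is packed and recovered with rt.PackInt / rt.UnpackInt.
func _exampleInstrRoundTrip() {
    ins := newInsVi(_OP_byte, ',')     // pack an opcode plus an immediate operand
    fmt.Println(ins.op() == _OP_byte)  // true: opcode recovered from bits 56..63
    fmt.Println(ins.vi() == ',')       // true: operand recovered from the low bits
}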
func (self _Instr) vf() uint8 {
    return (*rt.GoType)(self.p).KindFlags
}

func (self _Instr) vs() (v string) {
    (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
    (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi()
    return
}

func (self _Instr) vk() reflect.Kind {
    return (*rt.GoType)(self.p).Kind()
}

func (self _Instr) vt() reflect.Type {
    return (*rt.GoType)(self.p).Pack()
}

func (self _Instr) vp() (vt reflect.Type, pv bool) {
    return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1
}

func (self _Instr) i64() int64 {
    return int64(self.vi())
}

func (self _Instr) vlen() int {
    return int((*rt.GoType)(self.p).Size)
}
func (self _Instr) isBranch() bool {
    switch self.op() {
        case _OP_goto          : fallthrough
        case _OP_is_nil        : fallthrough
        case _OP_is_nil_p1     : fallthrough
        case _OP_is_zero_1     : fallthrough
        case _OP_is_zero_2     : fallthrough
        case _OP_is_zero_4     : fallthrough
        case _OP_is_zero_8     : fallthrough
        case _OP_map_check_key : fallthrough
        case _OP_map_write_key : fallthrough
        case _OP_slice_next    : fallthrough
        case _OP_cond_testc    : return true
        default                : return false
    }
}
func (self _Instr) disassemble() string {
    switch self.op() {
        case _OP_byte           : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi())))
        case _OP_text           : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs()))
        case _OP_index          : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi())
        case _OP_recurse        : fallthrough
        case _OP_map_iter       : fallthrough
        case _OP_marshal        : fallthrough
        case _OP_marshal_p      : fallthrough
        case _OP_marshal_text   : fallthrough
        case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt())
        case _OP_goto           : fallthrough
        case _OP_is_nil         : fallthrough
        case _OP_is_nil_p1      : fallthrough
        case _OP_is_zero_1      : fallthrough
        case _OP_is_zero_2      : fallthrough
        case _OP_is_zero_4      : fallthrough
        case _OP_is_zero_8      : fallthrough
        case _OP_is_zero_map    : fallthrough
        case _OP_cond_testc     : fallthrough
        case _OP_map_check_key  : fallthrough
        case _OP_map_write_key  : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi())
        case _OP_slice_next     : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt())
        default                 : return self.op().String()
    }
}
type _Program []_Instr

func (self _Program) pc() int {
    return len(self)
}

func (self _Program) tag(n int) {
    if n >= _MaxStack { // depth guard; the limit constant is defined outside this fragment
        panic("type nesting too deep")
    }
}
func (self _Program) pin(i int) {
    v := &self[i]
    v.u &= 0xffff000000000000
    v.u |= rt.PackInt(self.pc())
}

func (self _Program) rel(v []int) {
    for _, i := range v {
        self.pin(i)
    }
}
func (self *_Program) add(op _Op) {
    *self = append(*self, newInsOp(op))
}

func (self *_Program) key(op _Op) {
    *self = append(*self,
        newInsVi(_OP_byte, '"'),
        newInsOp(op),
        newInsVi(_OP_byte, '"'),
    )
}
func (self *_Program) int(op _Op, vi int) {
    *self = append(*self, newInsVi(op, vi))
}

func (self *_Program) str(op _Op, vs string) {
    *self = append(*self, newInsVs(op, vs))
}

func (self *_Program) rtt(op _Op, vt reflect.Type) {
    *self = append(*self, newInsVt(op, vt))
}

func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) {
    *self = append(*self, newInsVp(op, vt, pv))
}
func (self _Program) disassemble() string {
    nb  := len(self)
    tab := make([]bool, nb + 1)
    ret := make([]string, 0, nb + 1)

    /* prescan to get all the labels */
    for _, ins := range self {
        if ins.isBranch() {
            tab[ins.vi()] = true
        }
    }

    /* disassemble each instruction */
    for i, ins := range self {
        if !tab[i] {
            ret = append(ret, "\t" + ins.disassemble())
        } else {
            ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
        }
    }

    /* add the last label, if needed */
    if tab[nb] {
        ret = append(ret, fmt.Sprintf("L_%d:", nb))
    }

    /* add an "end" indicator, and join all the strings */
    return strings.Join(append(ret, "\tend"), "\n")
}
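// An illustrative sketch (not part of the original file): hand-assembling a
// tiny program and rendering it with the disassembler above. The opcode
// choice is arbitrary; real programs are produced by the compiler below.
func _exampleDisassemble() string {
    var p _Program
    p.add(_OP_null)       // an opcode with no operand
    p.int(_OP_byte, ',')  // an opcode with a 48-bit immediate operand
    return p.disassemble()
    // yields a listing along the lines of:
    //     null
    //     byte              ','
    //     end
}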
type _Compiler struct {
    opts option.CompileOptions
    pv   bool
    tab  map[reflect.Type]bool
    rec  map[reflect.Type]uint8
}
func newCompiler() *_Compiler {
    return &_Compiler {
        opts: option.DefaultCompileOptions(),
        tab: map[reflect.Type]bool{},
        rec: map[reflect.Type]uint8{},
    }
}
func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
    self.opts = opts
    if self.opts.RecursiveDepth > 0 {
        self.rec = map[reflect.Type]uint8{}
    }
    return self
}
func (self *_Compiler) rescue(ep *error) {
    if val := recover(); val != nil {
        if err, ok := val.(error); ok {
            *ep = err
        } else {
            panic(val)
        }
    }
}
func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) {
    defer self.rescue(&err)
    self.compileOne(&ret, 0, vt, pv)
    return
}
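// A usage sketch (not part of the original file): compiling an encoder
// program for a sample struct type and rendering its instruction listing.
func _exampleCompile() (string, error) {
    c := newCompiler()
    prog, err := c.compile(reflect.TypeOf(struct {
        Name string `json:"name"`
    }{}), false)
    if err != nil {
        return "", err
    }
    return prog.disassemble(), nil
}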
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) {
    if self.tab[vt] {
        p.vp(_OP_recurse, vt, pv)
    } else {
        self.compileRec(p, sp, vt, pv)
    }
}
func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) {
    pr := self.pv
    pt := reflect.PtrTo(vt)

    /* check for addressable `json.Marshaler` with pointer receiver */
    if pv && pt.Implements(jsonMarshalerType) {
        p.rtt(_OP_marshal_p, pt)
        return
    }

    /* check for `json.Marshaler` */
    if vt.Implements(jsonMarshalerType) {
        self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType)
        return
    }

    /* check for addressable `encoding.TextMarshaler` with pointer receiver */
    if pv && pt.Implements(encodingTextMarshalerType) {
        p.rtt(_OP_marshal_text_p, pt)
        return
    }

    /* check for `encoding.TextMarshaler` */
    if vt.Implements(encodingTextMarshalerType) {
        self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType)
        return
    }

    /* enter the recursion, and compile the type */
    self.pv = pv
    self.tab[vt] = true
    self.compileOps(p, sp, vt)

    /* exit the recursion */
    self.pv = pr
    delete(self.tab, vt)
}
func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Bool      : p.add(_OP_bool)
        case reflect.Int       : p.add(_OP_int())
        case reflect.Int8      : p.add(_OP_i8)
        case reflect.Int16     : p.add(_OP_i16)
        case reflect.Int32     : p.add(_OP_i32)
        case reflect.Int64     : p.add(_OP_i64)
        case reflect.Uint      : p.add(_OP_uint())
        case reflect.Uint8     : p.add(_OP_u8)
        case reflect.Uint16    : p.add(_OP_u16)
        case reflect.Uint32    : p.add(_OP_u32)
        case reflect.Uint64    : p.add(_OP_u64)
        case reflect.Uintptr   : p.add(_OP_uintptr())
        case reflect.Float32   : p.add(_OP_f32)
        case reflect.Float64   : p.add(_OP_f64)
        case reflect.String    : self.compileString    (p, vt)
        case reflect.Array     : self.compileArray     (p, sp, vt.Elem(), vt.Len())
        case reflect.Interface : self.compileInterface (p, vt)
        case reflect.Map       : self.compileMap       (p, sp, vt)
        case reflect.Ptr       : self.compilePtr       (p, sp, vt.Elem())
        case reflect.Slice     : self.compileSlice     (p, sp, vt.Elem())
        case reflect.Struct    : self.compileStruct    (p, sp, vt)
        default                : panic(error_type(vt))
    }
}
func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) {
    /* ... (body elided from this fragment: emits nil_op for nil values and
     * compiles non-nil values via fn) ... */
}

func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody)
}
func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) {
    /* ... (pointer dereference setup elided from this fragment) ... */
    self.compileOne(p, sp + 1, vt, true)
}
func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody)
}

func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) {
    /* ... (object header and iterator setup elided from this fragment) ... */
    p.rtt(_OP_map_iter, vt)
    /* ... */

    /* the first key-value pair, written without a leading comma */
    p.add(_OP_map_check_key)
    /* ... */
    p.add(_OP_map_write_key)
    self.compileMapBodyKey(p, vt.Key())
    /* ... */
    p.add(_OP_map_value_next)
    self.compileOne(p, sp + 2, vt.Elem(), false)

    /* subsequent pairs, each preceded by a comma */
    p.add(_OP_map_check_key)
    /* ... */
    p.add(_OP_map_write_key)
    self.compileMapBodyKey(p, vt.Key())
    /* ... */
    p.add(_OP_map_value_next)
    self.compileOne(p, sp + 2, vt.Elem(), false)
    /* ... (loop-back jump, map_stop and object epilogue elided) ... */
}
func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
    if !vk.Implements(encodingTextMarshalerType) {
        self.compileMapBodyTextKey(p, vk)
    } else {
        self.compileMapBodyUtextKey(p, vk)
    }
}
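// A small sketch (not part of the original file): JSON object keys must be
// strings, so p.key() above wraps non-string keys in '"' bytes. Compiling a
// map with integer keys makes the wrapping visible in the listing.
func _exampleMapKeyProgram() (string, error) {
    c := newCompiler()
    prog, err := c.compile(reflect.TypeOf(map[int8]bool(nil)), false)
    if err != nil {
        return "", err
    }
    return prog.disassemble(), nil // the key ops appear between byte '"' pairs
}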
func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
    switch vk.Kind() {
        case reflect.Invalid : panic("map key is nil")
        case reflect.Bool    : p.key(_OP_bool)
        case reflect.Int     : p.key(_OP_int())
        case reflect.Int8    : p.key(_OP_i8)
        case reflect.Int16   : p.key(_OP_i16)
        case reflect.Int32   : p.key(_OP_i32)
        case reflect.Int64   : p.key(_OP_i64)
        case reflect.Uint    : p.key(_OP_uint())
        case reflect.Uint8   : p.key(_OP_u8)
        case reflect.Uint16  : p.key(_OP_u16)
        case reflect.Uint32  : p.key(_OP_u32)
        case reflect.Uint64  : p.key(_OP_u64)
        case reflect.Uintptr : p.key(_OP_uintptr())
        case reflect.Float32 : p.key(_OP_f32)
        case reflect.Float64 : p.key(_OP_f64)
        case reflect.String  : self.compileString(p, vk)
        default              : panic(error_type(vk))
    }
}
func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
    if vk.Kind() != reflect.Ptr {
        p.rtt(_OP_marshal_text, vk)
    } else {
        self.compileMapBodyUtextPtr(p, vk)
    }
}
func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
    /* ... (nil check on the pointer key elided from this fragment) ... */
    p.rtt(_OP_marshal_text, vk)
    /* ... (jump over the nil case elided) ... */
    p.str(_OP_text, "\"\"")     // a nil pointer key is written as an empty string
    /* ... */
}
func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
}

func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
    if isSimpleByte(vt) {
        /* ... ([]byte fast path elided from this fragment) ... */
    } else {
        self.compileSliceArray(p, sp, vt)
    }
}
func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
    /* ... (array header and slice-length setup elided from this fragment) ... */

    /* the first element */
    p.rtt(_OP_slice_next, vt)
    self.compileOne(p, sp + 1, vt, true)

    /* subsequent elements, each preceded by a comma */
    p.rtt(_OP_slice_next, vt)
    self.compileOne(p, sp + 1, vt, true)
    /* ... (loop-back jump and closing bracket elided) ... */
}
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
    /* ... (array header elided; nb is the element count, vt the element type) ... */

    /* the first item */
    self.compileOne(p, sp + 1, vt, self.pv)

    /* remaining items */
    for i := 1; i < nb; i++ {
        p.int(_OP_byte, ',')
        p.int(_OP_index, i * int(vt.Size()))
        self.compileOne(p, sp + 1, vt, self.pv)
    }

    /* ... (closing bracket elided) ... */
}
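// A small sketch (not part of the original file) showing that the `index`
// operand above is a byte offset rather than an element index: for a
// [3]int16 array, the elements sit at offsets 0, 2 and 4.
func _exampleArrayOffsets() {
    vt := reflect.TypeOf(int16(0))
    for i := 1; i < 3; i++ {
        fmt.Println(i * int(vt.Size())) // prints 2, then 4
    }
}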
func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
    if vt != jsonNumberType {
        p.add(_OP_str)
    } else {
        p.add(_OP_number)
    }
}
func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
        p.vp(_OP_recurse, vt, self.pv)
        if self.opts.RecursiveDepth > 0 {
            /* ... (remember this type for a separate compilation pass; elided) ... */
        }
    } else {
        self.compileStructBody(p, sp, vt)
    }
}
func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
    /* ... (object header and comma-flag setup elided from this fragment) ... */

    /* compile each field */
    for _, fv := range resolver.ResolveStruct(vt) {
        var s []int
        var o resolver.Offset

        /* "omitempty" for arrays */
        if fv.Type.Kind() == reflect.Array {
            if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
                continue
            }
        }

        /* index to the field */
        for _, o = range fv.Path {
            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
                s = append(s, p.pc())
                /* ... (nil check and dereference elided) ... */
            }
        }

        /* check for "omitempty" option */
        if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
            s = append(s, p.pc())
            self.compileStructFieldZero(p, fv.Type)
        }

        /* add the comma if not the first element */
        i := p.pc()
        p.add(_OP_cond_testc)   // branches past the comma for the first field
        p.int(_OP_byte, ',')
        p.pin(i)

        /* compile the key and value */
        ft := fv.Type
        p.str(_OP_text, Quote(fv.Name) + ":")

        /* check for "stringize" option */
        if (fv.Opts & resolver.F_stringize) == 0 {
            self.compileOne(p, sp + 1, ft, self.pv)
        } else {
            self.compileStructFieldStr(p, sp + 1, ft)
        }

        /* patch the skipping jumps and reload the struct pointer */
        p.rel(s)
        /* ... (struct pointer reload elided) ... */
    }

    /* ... (object epilogue elided) ... */
}
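// A usage sketch (not part of the original file): compiling a struct with an
// `omitempty` field makes the zero check and label patching above visible in
// the listing (an is_zero_* branch guarding the field's key and value).
func _exampleOmitempty() (string, error) {
    c := newCompiler()
    prog, err := c.compile(reflect.TypeOf(struct {
        Age int `json:"age,omitempty"`
    }{}), false)
    if err != nil {
        return "", err
    }
    return prog.disassemble(), nil
}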
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
    sv := false
    ft := vt

    /* dereference the pointer if needed */
    if ft.Kind() == reflect.Ptr {
        ft = ft.Elem()
    }

    /* check if it can be stringized */
    switch ft.Kind() {
        case reflect.Bool    : sv = true
        case reflect.Int     : sv = true
        case reflect.Int8    : sv = true
        case reflect.Int16   : sv = true
        case reflect.Int32   : sv = true
        case reflect.Int64   : sv = true
        case reflect.Uint    : sv = true
        case reflect.Uint8   : sv = true
        case reflect.Uint16  : sv = true
        case reflect.Uint32  : sv = true
        case reflect.Uint64  : sv = true
        case reflect.Uintptr : sv = true
        case reflect.Float32 : sv = true
        case reflect.Float64 : sv = true
        case reflect.String  : sv = true
    }

    /* if it's not, ignore the "string" and follow the regular path */
    if !sv {
        self.compileOne(p, sp, vt, self.pv)
        return
    }

    /* dereference the pointer */
    if vt.Kind() == reflect.Ptr {
        /* ... (nil check and dereference elided from this fragment) ... */
    }

    /* special case of a double-quoted string */
    if ft != jsonNumberType && ft.Kind() == reflect.String {
        /* ... (string re-quoting path elided) ... */
    } else {
        self.compileStructFieldQuoted(p, sp, vt)
    }

    /* the "null" case of the pointer */
    /* ... (elided: a nil pointer field is written as "null") ... */
}
func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Bool      : p.add(_OP_is_zero_1)
        case reflect.Int       : p.add(_OP_is_zero_ints())
        case reflect.Int8      : p.add(_OP_is_zero_1)
        case reflect.Int16     : p.add(_OP_is_zero_2)
        case reflect.Int32     : p.add(_OP_is_zero_4)
        case reflect.Int64     : p.add(_OP_is_zero_8)
        case reflect.Uint      : p.add(_OP_is_zero_ints())
        case reflect.Uint8     : p.add(_OP_is_zero_1)
        case reflect.Uint16    : p.add(_OP_is_zero_2)
        case reflect.Uint32    : p.add(_OP_is_zero_4)
        case reflect.Uint64    : p.add(_OP_is_zero_8)
        case reflect.Uintptr   : p.add(_OP_is_nil)
        case reflect.Float32   : p.add(_OP_is_zero_4)
        case reflect.Float64   : p.add(_OP_is_zero_8)
        case reflect.String    : p.add(_OP_is_nil_p1)
        case reflect.Interface : p.add(_OP_is_nil_p1)
        case reflect.Map       : p.add(_OP_is_zero_map)
        case reflect.Ptr       : p.add(_OP_is_nil)
        case reflect.Slice     : p.add(_OP_is_nil_p1)
        default                : panic(error_type(vt))
    }
}
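// A small sketch (not part of the original file): why maps get a dedicated
// zero check. For `omitempty`, a non-nil but empty map is just as empty as a
// nil one, so a plain nil test would not be enough.
func _exampleEmptyMap() {
    m := map[string]int{}          // non-nil, but len(m) == 0
    fmt.Println(m == nil, len(m))  // false 0: a nil check alone would keep it
}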
func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
    p.int(_OP_byte, '"')
    self.compileOne(p, sp, vt, self.pv)
    p.int(_OP_byte, '"')
}
func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
    /* ... (nil check elided from this fragment) ... */

    /* ifaces and efaces are different */
    if vt.NumMethod() == 0 {
        /* ... (empty-interface encoding elided) ... */
    } else {
        /* ... (non-empty-interface encoding elided) ... */
    }

    /* the "null" value */
    /* ... (elided: a nil interface is written as "null") ... */
}
func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
    vk := vt.Kind()

    /* direct receiver */
    if vk != reflect.Ptr {
        p.rtt(op, vt)
        return
    }

    /* value receiver with a pointer type, check for nil before calling the marshaler */
    /* ... (remainder elided from this fragment) ... */
}