 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/resolver`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/option`
    _OP_any _Op = iota + 1
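
    // The size constants below use a branch-free width probe: ^uint(0) >> 63 is 1
    // on a 64-bit platform and 0 on a 32-bit one, so 32 << (^uint(0) >> 63) evaluates
    // to 64 or 32 respectively (and likewise for uintptr). _PTR_BYTE is the pointer
    // size in bytes rather than bits.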
    _INT_SIZE = 32 << (^uint(0) >> 63)
    _PTR_SIZE = 32 << (^uintptr(0) >> 63)
    _PTR_BYTE = unsafe.Sizeof(uintptr(0))
    _MAX_ILBUF  = 100000 // cutoff at 100k IL instructions
    _MAX_FIELDS = 50     // cutoff at 50 fields per struct

var _OpNames = [256]string {
    _OP_unquote : "unquote",
    _OP_is_null : "is_null",
    _OP_is_null_quote : "is_null_quote",
    _OP_map_init : "map_init",
    _OP_map_key_i8 : "map_key_i8",
    _OP_map_key_i16 : "map_key_i16",
    _OP_map_key_i32 : "map_key_i32",
    _OP_map_key_i64 : "map_key_i64",
    _OP_map_key_u8 : "map_key_u8",
    _OP_map_key_u16 : "map_key_u16",
    _OP_map_key_u32 : "map_key_u32",
    _OP_map_key_u64 : "map_key_u64",
    _OP_map_key_f32 : "map_key_f32",
    _OP_map_key_f64 : "map_key_f64",
    _OP_map_key_str : "map_key_str",
    _OP_map_key_utext : "map_key_utext",
    _OP_map_key_utext_p : "map_key_utext_p",
    _OP_array_skip : "array_skip",
    _OP_slice_init : "slice_init",
    _OP_slice_append : "slice_append",
    _OP_object_skip : "object_skip",
    _OP_object_next : "object_next",
    _OP_struct_field : "struct_field",
    _OP_unmarshal : "unmarshal",
    _OP_unmarshal_p : "unmarshal_p",
    _OP_unmarshal_text : "unmarshal_text",
    _OP_unmarshal_text_p : "unmarshal_text_p",
    _OP_lspace : "lspace",
    _OP_match_char : "match_char",
    _OP_check_char : "check_char",
    _OP_drop_2 : "drop_2",
    _OP_recurse : "recurse",
    _OP_switch : "switch",
    _OP_check_char_0 : "check_char_0",
    _OP_dismatch_err : "dismatch_err",
    _OP_go_skip : "go_skip",
    _OP_check_empty : "check_empty",

func (self _Op) String() string {
    if ret := _OpNames[self]; ret != "" {

        case 32: return _OP_i32
        case 64: return _OP_i64
        default: panic("unsupported int size")

        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported uint size")

func _OP_uintptr() _Op {
        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported pointer size")

func _OP_map_key_int() _Op {
        case 32: return _OP_map_key_i32
        case 64: return _OP_map_key_i64
        default: panic("unsupported int size")

func _OP_map_key_uint() _Op {
        case 32: return _OP_map_key_u32
        case 64: return _OP_map_key_u64
        default: panic("unsupported uint size")

func _OP_map_key_uintptr() _Op {
        case 32: return _OP_map_key_u32
        case 64: return _OP_map_key_u64
        default: panic("unsupported pointer size")

    u uint64         // union {op: 8, vb: 8, vi: 48}; vi may hold an int or len([]int)
    p unsafe.Pointer // may be GoSlice.Data, a *GoType, or a *caching.FieldMap
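
// Layout note (derived from packOp and the accessors below): the opcode sits in
// the top 8 bits of u (hence the << 56 / >> 56 pair), vb occupies bits 48-55, and
// vi is packed into the low 48 bits via rt.PackInt. p carries the pointer half of
// whichever operand the instruction needs.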

func packOp(op _Op) uint64 {
    return uint64(op) << 56

func newInsOp(op _Op) _Instr {
    return _Instr{u: packOp(op)}

func newInsVi(op _Op, vi int) _Instr {
    return _Instr{u: packOp(op) | rt.PackInt(vi)}

func newInsVb(op _Op, vb byte) _Instr {
    return _Instr{u: packOp(op) | (uint64(vb) << 48)}

func newInsVs(op _Op, vs []int) _Instr {
        u: packOp(op) | rt.PackInt(len(vs)),
        p: (*rt.GoSlice)(unsafe.Pointer(&vs)).Ptr,

func newInsVt(op _Op, vt reflect.Type) _Instr {
        p: unsafe.Pointer(rt.UnpackType(vt)),

func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
        p: unsafe.Pointer(vf),

func (self _Instr) op() _Op {
    return _Op(self.u >> 56)

func (self _Instr) vi() int {
    return rt.UnpackInt(self.u)

func (self _Instr) vb() byte {
    return byte(self.u >> 48)

func (self _Instr) vs() (v []int) {
    (*rt.GoSlice)(unsafe.Pointer(&v)).Ptr = self.p
    (*rt.GoSlice)(unsafe.Pointer(&v)).Cap = self.vi()
    (*rt.GoSlice)(unsafe.Pointer(&v)).Len = self.vi()
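
// vs() is the inverse of newInsVs: it rebuilds a []int header in place, pointing
// its data at p and restoring both Len and Cap from the 48-bit vi operand, so a
// switch instruction's jump table round-trips through a single instruction word
// plus one pointer.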

func (self _Instr) vf() *caching.FieldMap {
    return (*caching.FieldMap)(self.p)

func (self _Instr) vk() reflect.Kind {
    return (*rt.GoType)(self.p).Kind()

func (self _Instr) vt() reflect.Type {
    return (*rt.GoType)(self.p).Pack()

func (self _Instr) i64() int64 {
    return int64(self.vi())

func (self _Instr) vlen() int {
    return int((*rt.GoType)(self.p).Size)

func (self _Instr) isBranch() bool {
        case _OP_goto : fallthrough
        case _OP_switch : fallthrough
        case _OP_is_null : fallthrough
        case _OP_is_null_quote : fallthrough
        case _OP_check_char : return true
        default : return false

func (self _Instr) disassemble() string {
        case _OP_dyn : fallthrough
        case _OP_deref : fallthrough
        case _OP_map_key_i8 : fallthrough
        case _OP_map_key_i16 : fallthrough
        case _OP_map_key_i32 : fallthrough
        case _OP_map_key_i64 : fallthrough
        case _OP_map_key_u8 : fallthrough
        case _OP_map_key_u16 : fallthrough
        case _OP_map_key_u32 : fallthrough
        case _OP_map_key_u64 : fallthrough
        case _OP_map_key_f32 : fallthrough
        case _OP_map_key_f64 : fallthrough
        case _OP_map_key_str : fallthrough
        case _OP_map_key_utext : fallthrough
        case _OP_map_key_utext_p : fallthrough
        case _OP_slice_init : fallthrough
        case _OP_slice_append : fallthrough
        case _OP_unmarshal : fallthrough
        case _OP_unmarshal_p : fallthrough
        case _OP_unmarshal_text : fallthrough
        case _OP_unmarshal_text_p : fallthrough
        case _OP_recurse : return fmt.Sprintf("%-18s%s", self.op(), self.vt())
        case _OP_goto : fallthrough
        case _OP_is_null_quote : fallthrough
        case _OP_is_null : return fmt.Sprintf("%-18sL_%d", self.op(), self.vi())
        case _OP_index : fallthrough
        case _OP_array_clear : fallthrough
        case _OP_array_clear_p : return fmt.Sprintf("%-18s%d", self.op(), self.vi())
        case _OP_switch : return fmt.Sprintf("%-18s%s", self.op(), self.formatSwitchLabels())
        case _OP_struct_field : return fmt.Sprintf("%-18s%s", self.op(), self.formatStructFields())
        case _OP_match_char : return fmt.Sprintf("%-18s%s", self.op(), strconv.QuoteRune(rune(self.vb())))
        case _OP_check_char : return fmt.Sprintf("%-18sL_%d, %s", self.op(), self.vi(), strconv.QuoteRune(rune(self.vb())))
        default : return self.op().String()

func (self _Instr) formatSwitchLabels() string {
    /* format each label */
    for i, v = range self.vs() {
        m = append(m, fmt.Sprintf("%d=L_%d", i, v))

    /* join them with "," */
    return strings.Join(m, ", ")
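
// For a switch over field indices 0..2 whose arms sit at program counters 7, 12
// and 20, the rendered operand would read "0=L_7, 1=L_12, 2=L_20" (an illustrative
// example, not output captured from a real run).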

func (self _Instr) formatStructFields() string {
    var m []struct{i int; n string}

    /* extract all the fields */
    for i = 0; i < self.vf().N; i++ {
        if v := self.vf().At(i); v.Hash != 0 {
            m = append(m, struct{i int; n string}{i: v.ID, n: v.Name})

    /* sort by field name */
    sort.Slice(m, func(i, j int) bool {
        return m[i].n < m[j].n

    /* format each field */
    for _, v := range m {
        r = append(r, fmt.Sprintf("%s=%d", v.n, v.i))

    /* join them with "," */
    return strings.Join(r, ", ")

func (self _Program) pc() int {

func (self _Program) tag(n int) {
    panic("type nesting too deep")

func (self _Program) pin(i int) {
    v.u &= 0xffff000000000000
    v.u |= rt.PackInt(self.pc())

func (self _Program) rel(v []int) {
    for _, i := range v {

func (self *_Program) add(op _Op) {
    *self = append(*self, newInsOp(op))

func (self *_Program) int(op _Op, vi int) {
    *self = append(*self, newInsVi(op, vi))

func (self *_Program) chr(op _Op, vb byte) {
    *self = append(*self, newInsVb(op, vb))

func (self *_Program) tab(op _Op, vs []int) {
    *self = append(*self, newInsVs(op, vs))

func (self *_Program) rtt(op _Op, vt reflect.Type) {
    *self = append(*self, newInsVt(op, vt))

func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
    *self = append(*self, newInsVf(op, vf))
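
// The compiler below only ever emits instructions through these six helpers:
// add for operand-free opcodes, int and chr for immediate operands, tab for jump
// tables, rtt for a reflect.Type operand, and fmv for a field map. Each is a thin
// append of the matching newIns* constructor.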

func (self _Program) disassemble() string {
    tab := make([]bool, nb + 1)
    ret := make([]string, 0, nb + 1)

    /* prescan to get all the labels */
    for _, ins := range self {
        if ins.op() != _OP_switch {
            for _, v := range ins.vs() {

    /* disassemble each instruction */
    for i, ins := range self {
        ret = append(ret, "\t" + ins.disassemble())
        ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))

    /* add the last label, if needed */
    ret = append(ret, fmt.Sprintf("L_%d:", nb))

    /* add an "end" indicator, and join all the strings */
    return strings.Join(append(ret, "\tend"), "\n")
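
// The listing is assembler-like: each instruction is rendered on its own
// tab-indented line, any pc that is a branch target is preceded by an "L_<pc>:"
// label, a trailing "L_<n>:" label is appended for branches past the last
// instruction, and the whole thing ends with an "end" marker.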

type _Compiler struct {
    opts option.CompileOptions
    tab  map[reflect.Type]bool
    rec  map[reflect.Type]bool

func newCompiler() *_Compiler {
        opts: option.DefaultCompileOptions(),
        tab:  map[reflect.Type]bool{},
        rec:  map[reflect.Type]bool{},

func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {

func (self *_Compiler) rescue(ep *error) {
    if val := recover(); val != nil {
        if err, ok := val.(error); ok {

func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
    defer self.rescue(&err)
    self.compileOne(&ret, 0, vt)
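
// A minimal usage sketch of this front end (hypothetical caller code; the
// surrounding decoder glue is not part of this file):
//
//     c := newCompiler().apply(option.DefaultCompileOptions())
//     prog, err := c.compile(reflect.TypeOf(MyStruct{})) // MyStruct is a stand-in
//     if err == nil {
//         println(prog.disassemble())
//     }
//
// compile never returns errors directly from the tree walk: compileOps and
// friends panic on unsupported types, and rescue converts that panic back into
// the err result.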

func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool {
    pt := reflect.PtrTo(vt)

    /* check for `json.Unmarshaler` with pointer receiver */
    if pt.Implements(jsonUnmarshalerType) {
        p.rtt(_OP_unmarshal_p, pt)

    /* check for `json.Unmarshaler` */
    if vt.Implements(jsonUnmarshalerType) {
        self.compileUnmarshalJson(p, vt)

    /* check for `encoding.TextUnmarshaler` with pointer receiver */
    if pt.Implements(encodingTextUnmarshalerType) {
        self.compileUnmarshalTextPtr(p, pt)

    /* check for `encoding.TextUnmarshaler` */
    if vt.Implements(encodingTextUnmarshalerType) {
        self.compileUnmarshalText(p, vt)
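
// The four checks above give json.Unmarshaler priority over
// encoding.TextUnmarshaler, and for each interface the pointer-receiver
// implementation (probed via reflect.PtrTo) is tried before the value-receiver
// one, matching the precedence used by encoding/json.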

func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
    /* check for recursive nesting */
        p.rtt(_OP_recurse, vt)

    if self.checkMarshaler(p, vt) {

    /* enter the recursion */
    self.compileOps(p, sp, vt)

func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
        case reflect.Bool : self.compilePrimitive (vt, p, _OP_bool)
        case reflect.Int : self.compilePrimitive (vt, p, _OP_int())
        case reflect.Int8 : self.compilePrimitive (vt, p, _OP_i8)
        case reflect.Int16 : self.compilePrimitive (vt, p, _OP_i16)
        case reflect.Int32 : self.compilePrimitive (vt, p, _OP_i32)
        case reflect.Int64 : self.compilePrimitive (vt, p, _OP_i64)
        case reflect.Uint : self.compilePrimitive (vt, p, _OP_uint())
        case reflect.Uint8 : self.compilePrimitive (vt, p, _OP_u8)
        case reflect.Uint16 : self.compilePrimitive (vt, p, _OP_u16)
        case reflect.Uint32 : self.compilePrimitive (vt, p, _OP_u32)
        case reflect.Uint64 : self.compilePrimitive (vt, p, _OP_u64)
        case reflect.Uintptr : self.compilePrimitive (vt, p, _OP_uintptr())
        case reflect.Float32 : self.compilePrimitive (vt, p, _OP_f32)
        case reflect.Float64 : self.compilePrimitive (vt, p, _OP_f64)
        case reflect.String : self.compileString (p, vt)
        case reflect.Array : self.compileArray (p, sp, vt)
        case reflect.Interface : self.compileInterface (p, vt)
        case reflect.Map : self.compileMap (p, sp, vt)
        case reflect.Ptr : self.compilePtr (p, sp, vt)
        case reflect.Slice : self.compileSlice (p, sp, vt)
        case reflect.Struct : self.compileStruct (p, sp, vt)
        default : panic (&json.UnmarshalTypeError{Type: vt})

func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
    if reflect.PtrTo(vt.Key()).Implements(encodingTextUnmarshalerType) {
        self.compileMapOp(p, sp, vt, _OP_map_key_utext_p)
    } else if vt.Key().Implements(encodingTextUnmarshalerType) {
        self.compileMapOp(p, sp, vt, _OP_map_key_utext)
        self.compileMapUt(p, sp, vt)

func (self *_Compiler) compileMapUt(p *_Program, sp int, vt reflect.Type) {
    switch vt.Key().Kind() {
        case reflect.Int : self.compileMapOp(p, sp, vt, _OP_map_key_int())
        case reflect.Int8 : self.compileMapOp(p, sp, vt, _OP_map_key_i8)
        case reflect.Int16 : self.compileMapOp(p, sp, vt, _OP_map_key_i16)
        case reflect.Int32 : self.compileMapOp(p, sp, vt, _OP_map_key_i32)
        case reflect.Int64 : self.compileMapOp(p, sp, vt, _OP_map_key_i64)
        case reflect.Uint : self.compileMapOp(p, sp, vt, _OP_map_key_uint())
        case reflect.Uint8 : self.compileMapOp(p, sp, vt, _OP_map_key_u8)
        case reflect.Uint16 : self.compileMapOp(p, sp, vt, _OP_map_key_u16)
        case reflect.Uint32 : self.compileMapOp(p, sp, vt, _OP_map_key_u32)
        case reflect.Uint64 : self.compileMapOp(p, sp, vt, _OP_map_key_u64)
        case reflect.Uintptr : self.compileMapOp(p, sp, vt, _OP_map_key_uintptr())
        case reflect.Float32 : self.compileMapOp(p, sp, vt, _OP_map_key_f32)
        case reflect.Float64 : self.compileMapOp(p, sp, vt, _OP_map_key_f64)
        case reflect.String : self.compileMapOp(p, sp, vt, _OP_map_key_str)
        default : panic(&json.UnmarshalTypeError{Type: vt})
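
// Map keys therefore come in three flavours: keys whose type (or pointer type)
// implements encoding.TextUnmarshaler use the utext opcodes, plain string,
// integer and float kinds each get a dedicated map_key_* opcode, and anything
// else is rejected with a json.UnmarshalTypeError.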

func (self *_Compiler) compileMapOp(p *_Program, sp int, vt reflect.Type, op _Op) {
    skip := self.checkIfSkip(p, vt, '{')
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, '"')

    /* match the value separator */
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp + 2, vt.Elem())

    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, ',')
    p.chr(_OP_match_char, '"')

    /* match the value separator */
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp + 2, vt.Elem())

func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
    /* dereference all the way down */
    for et.Kind() == reflect.Ptr {
        if self.checkMarshaler(p, et) {

    /* check for recursive nesting */
    p.rtt(_OP_recurse, et)

    /* enter the recursion */

    /* do not inline the pointer type:
     * recursing into the defined pointer type's elem would cause issue #379. */
    self.compileOps(p, sp, et)

    // set the val pointer to nil

func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type) {
    skip := self.checkIfSkip(p, vt, '[')
    p.chr(_OP_check_char, ']')

    /* decode every item */
    for i := 1; i <= vt.Len(); i++ {
        self.compileOne(p, sp + 1, vt.Elem())
        p.int(_OP_index, i * int(vt.Elem().Size()))
        v = append(v, p.pc())
        p.chr(_OP_check_char, ']')
        p.chr(_OP_match_char, ',')

    /* drop the rest of the array */
    p.add(_OP_array_skip)

    /* check for pointer data */
    if rt.UnpackType(vt.Elem()).PtrData == 0 {
        p.int(_OP_array_clear, int(vt.Size()))
        p.int(_OP_array_clear_p, int(vt.Size()))

    /* restore the stack */

func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
    if vt.Elem().Kind() == byteType.Kind() {
        self.compileSliceBin(p, sp, vt)
        self.compileSliceList(p, sp, vt)

func (self *_Compiler) compileSliceBin(p *_Program, sp int, vt reflect.Type) {
    p.chr(_OP_check_char, '[')
    skip := self.checkIfSkip(p, vt, '"')
    p.chr(_OP_check_char, '"')
    self.compileSliceBody(p, sp, vt.Elem())

func (self *_Compiler) compileSliceList(p *_Program, sp int, vt reflect.Type) {
    skip := self.checkIfSkip(p, vt, '[')
    self.compileSliceBody(p, sp, vt.Elem())

func (self *_Compiler) compileSliceBody(p *_Program, sp int, et reflect.Type) {
    p.chr(_OP_check_empty, ']')
    p.rtt(_OP_slice_init, et)
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp + 1, et)
    p.chr(_OP_check_char, ']')
    p.chr(_OP_match_char, ',')
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp + 1, et)

func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
    if vt == jsonNumberType {
        self.compilePrimitive(vt, p, _OP_num)
        self.compileStringBody(vt, p)

func (self *_Compiler) compileStringBody(vt reflect.Type, p *_Program) {
    skip := self.checkIfSkip(p, vt, '"')

func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
        p.rtt(_OP_recurse, vt)
        if self.opts.RecursiveDepth > 0 {
        self.compileStructBody(p, sp, vt)
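
// A struct is compiled inline only while the program stays small: once the
// nesting depth reaches opts.MaxInlineDepth, the instruction count reaches
// _MAX_ILBUF, or a nested struct has _MAX_FIELDS or more fields, the compiler
// falls back to a single recurse instruction so the nested type can be handled
// by its own program.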

func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
    fv := resolver.ResolveStruct(vt)
    fm, sw := caching.CreateFieldMap(len(fv)), make([]int, len(fv))

    /* start of object */
    skip := self.checkIfSkip(p, vt, '{')
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)
    p.add(_OP_object_next)
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, ',')

    /* special case of an empty struct */
    p.add(_OP_object_skip)

    /* match the remaining fields */
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)
    p.add(_OP_object_next)

    /* process each field */
    for i, f := range fv {
        /* index to the field */
        for _, o := range f.Path {
            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
                p.rtt(_OP_deref, o.Type)

        /* check for the "stringize" option */
        if (f.Opts & resolver.F_stringize) == 0 {
            self.compileOne(p, sp + 1, f.Type)
            self.compileStructFieldStr(p, sp + 1, f.Type)

    /* load the state, and try the next field */

func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
    /* dereference the pointer if needed */
    if ft.Kind() == reflect.Ptr {

    /* check if it can be stringized */
        case reflect.Bool : sv = true
        case reflect.Int : sv = true
        case reflect.Int8 : sv = true
        case reflect.Int16 : sv = true
        case reflect.Int32 : sv = true
        case reflect.Int64 : sv = true
        case reflect.Uint : sv = true
        case reflect.Uint8 : sv = true
        case reflect.Uint16 : sv = true
        case reflect.Uint32 : sv = true
        case reflect.Uint64 : sv = true
        case reflect.Uintptr : sv = true
        case reflect.Float32 : sv = true
        case reflect.Float64 : sv = true
        case reflect.String : sv = true

    /* if it's not, ignore the "string" option and follow the regular path */
    self.compileOne(p, sp, vt)

    /* remove the leading space, and match the leading quote */
    skip := self.checkIfSkip(p, stringType, '"')

    /* also check for inner "null" */
    p.add(_OP_is_null_quote)

    /* dereference the pointer only when it is not null */
    if vk == reflect.Ptr {

    p.chr(_OP_check_char_0, '"')

    /* string opcode selector */
    _OP_string := func() _Op {
        if ft == jsonNumberType {

    /* compile for each type */
        case reflect.Bool : p.add(_OP_bool)
        case reflect.Int : p.add(_OP_int())
        case reflect.Int8 : p.add(_OP_i8)
        case reflect.Int16 : p.add(_OP_i16)
        case reflect.Int32 : p.add(_OP_i32)
        case reflect.Int64 : p.add(_OP_i64)
        case reflect.Uint : p.add(_OP_uint())
        case reflect.Uint8 : p.add(_OP_u8)
        case reflect.Uint16 : p.add(_OP_u16)
        case reflect.Uint32 : p.add(_OP_u32)
        case reflect.Uint64 : p.add(_OP_u64)
        case reflect.Uintptr : p.add(_OP_uintptr())
        case reflect.Float32 : p.add(_OP_f32)
        case reflect.Float64 : p.add(_OP_f64)
        case reflect.String : p.add(_OP_string())
        default : panic("not reachable")

    /* the closing quote is not needed when parsing a pure string */
    if vt == jsonNumberType || vt.Kind() != reflect.String {
        p.chr(_OP_match_char, '"')

    /* pin the `is_null_quote` jump location */
    if n1 != -1 && vk != reflect.Ptr {

    /* "null" but not a pointer, act as if the field is not present */
    if vk != reflect.Ptr {
        p.rtt(_OP_dismatch_err, vt)

    /* the "null" case of the pointer */
    p.pin(n0) // `is_null` jump location
    p.pin(n1) // `is_null_quote` jump location
    p.rtt(_OP_dismatch_err, vt)

func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
    /* check for empty interface */
    if vt.NumMethod() == 0 {

    /* finish the OpCode */

func (self *_Compiler) compilePrimitive(vt reflect.Type, p *_Program, op _Op) {
    // skip := self.checkPrimitive(p, vt)

func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) {
    if k != reflect.Ptr {

    /* it seems that in the standard encoding/json library, "null" takes priority over any kind of unmarshaler */

func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
    /* check for dynamic interface */
    if vt.Kind() == reflect.Interface {

    /* call the unmarshaler */
    self.compileUnmarshalEnd(p, vt, i)

func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
    v := _OP_unmarshal_text

    /* check for dynamic interface */
    if vt.Kind() == reflect.Interface {

    p.chr(_OP_match_char, '"')

    /* call the unmarshaler */
    self.compileUnmarshalEnd(p, vt, i)

func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
    p.chr(_OP_match_char, '"')
    p.rtt(_OP_unmarshal_text_p, vt)

func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
    p.chr(_OP_check_char_0, c)
    p.rtt(_OP_dismatch_err, vt)
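
// checkIfSkip is the shared guard used by the compile* helpers above: it emits a
// check_char_0 test for the expected leading character together with a
// dismatch_err record carrying the target type, and returns an instruction index
// that callers keep as `skip` (presumably pinned once the value has been
// compiled, so a mismatched value is skipped and reported rather than aborting
// the decode).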