github.com/bytedance/sonic@v1.11.7-0.20240517092252-d2edb31b167b/internal/decoder/compiler.go (about) 1 /* 2 * Copyright 2021 ByteDance Inc. 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package decoder 18 19 import ( 20 `encoding/json` 21 `fmt` 22 `reflect` 23 `sort` 24 `strconv` 25 `strings` 26 `unsafe` 27 28 `github.com/bytedance/sonic/internal/caching` 29 `github.com/bytedance/sonic/internal/resolver` 30 `github.com/bytedance/sonic/internal/rt` 31 `github.com/bytedance/sonic/option` 32 ) 33 34 type _Op uint8 35 36 const ( 37 _OP_any _Op = iota + 1 38 _OP_dyn 39 _OP_str 40 _OP_bin 41 _OP_bool 42 _OP_num 43 _OP_i8 44 _OP_i16 45 _OP_i32 46 _OP_i64 47 _OP_u8 48 _OP_u16 49 _OP_u32 50 _OP_u64 51 _OP_f32 52 _OP_f64 53 _OP_unquote 54 _OP_nil_1 55 _OP_nil_2 56 _OP_nil_3 57 _OP_deref 58 _OP_index 59 _OP_is_null 60 _OP_is_null_quote 61 _OP_map_init 62 _OP_map_key_i8 63 _OP_map_key_i16 64 _OP_map_key_i32 65 _OP_map_key_i64 66 _OP_map_key_u8 67 _OP_map_key_u16 68 _OP_map_key_u32 69 _OP_map_key_u64 70 _OP_map_key_f32 71 _OP_map_key_f64 72 _OP_map_key_str 73 _OP_map_key_utext 74 _OP_map_key_utext_p 75 _OP_array_skip 76 _OP_array_clear 77 _OP_array_clear_p 78 _OP_slice_init 79 _OP_slice_append 80 _OP_object_skip 81 _OP_object_next 82 _OP_struct_field 83 _OP_unmarshal 84 _OP_unmarshal_p 85 _OP_unmarshal_text 86 _OP_unmarshal_text_p 87 _OP_lspace 88 _OP_match_char 89 _OP_check_char 90 _OP_load 91 _OP_save 92 _OP_drop 93 _OP_drop_2 94 _OP_recurse 95 
_OP_goto 96 _OP_switch 97 _OP_check_char_0 98 _OP_dismatch_err 99 _OP_go_skip 100 _OP_add 101 _OP_check_empty 102 _OP_debug 103 ) 104 105 const ( 106 _INT_SIZE = 32 << (^uint(0) >> 63) 107 _PTR_SIZE = 32 << (^uintptr(0) >> 63) 108 _PTR_BYTE = unsafe.Sizeof(uintptr(0)) 109 ) 110 111 const ( 112 _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions 113 _MAX_FIELDS = 50 // cutoff at 50 fields struct 114 ) 115 116 var _OpNames = [256]string { 117 _OP_any : "any", 118 _OP_dyn : "dyn", 119 _OP_str : "str", 120 _OP_bin : "bin", 121 _OP_bool : "bool", 122 _OP_num : "num", 123 _OP_i8 : "i8", 124 _OP_i16 : "i16", 125 _OP_i32 : "i32", 126 _OP_i64 : "i64", 127 _OP_u8 : "u8", 128 _OP_u16 : "u16", 129 _OP_u32 : "u32", 130 _OP_u64 : "u64", 131 _OP_f32 : "f32", 132 _OP_f64 : "f64", 133 _OP_unquote : "unquote", 134 _OP_nil_1 : "nil_1", 135 _OP_nil_2 : "nil_2", 136 _OP_nil_3 : "nil_3", 137 _OP_deref : "deref", 138 _OP_index : "index", 139 _OP_is_null : "is_null", 140 _OP_is_null_quote : "is_null_quote", 141 _OP_map_init : "map_init", 142 _OP_map_key_i8 : "map_key_i8", 143 _OP_map_key_i16 : "map_key_i16", 144 _OP_map_key_i32 : "map_key_i32", 145 _OP_map_key_i64 : "map_key_i64", 146 _OP_map_key_u8 : "map_key_u8", 147 _OP_map_key_u16 : "map_key_u16", 148 _OP_map_key_u32 : "map_key_u32", 149 _OP_map_key_u64 : "map_key_u64", 150 _OP_map_key_f32 : "map_key_f32", 151 _OP_map_key_f64 : "map_key_f64", 152 _OP_map_key_str : "map_key_str", 153 _OP_map_key_utext : "map_key_utext", 154 _OP_map_key_utext_p : "map_key_utext_p", 155 _OP_array_skip : "array_skip", 156 _OP_slice_init : "slice_init", 157 _OP_slice_append : "slice_append", 158 _OP_object_skip : "object_skip", 159 _OP_object_next : "object_next", 160 _OP_struct_field : "struct_field", 161 _OP_unmarshal : "unmarshal", 162 _OP_unmarshal_p : "unmarshal_p", 163 _OP_unmarshal_text : "unmarshal_text", 164 _OP_unmarshal_text_p : "unmarshal_text_p", 165 _OP_lspace : "lspace", 166 _OP_match_char : "match_char", 167 _OP_check_char : 
"check_char", 168 _OP_load : "load", 169 _OP_save : "save", 170 _OP_drop : "drop", 171 _OP_drop_2 : "drop_2", 172 _OP_recurse : "recurse", 173 _OP_goto : "goto", 174 _OP_switch : "switch", 175 _OP_check_char_0 : "check_char_0", 176 _OP_dismatch_err : "dismatch_err", 177 _OP_add : "add", 178 _OP_go_skip : "go_skip", 179 _OP_check_empty : "check_empty", 180 _OP_debug : "debug", 181 } 182 183 func (self _Op) String() string { 184 if ret := _OpNames[self]; ret != "" { 185 return ret 186 } else { 187 return "<invalid>" 188 } 189 } 190 191 func _OP_int() _Op { 192 switch _INT_SIZE { 193 case 32: return _OP_i32 194 case 64: return _OP_i64 195 default: panic("unsupported int size") 196 } 197 } 198 199 func _OP_uint() _Op { 200 switch _INT_SIZE { 201 case 32: return _OP_u32 202 case 64: return _OP_u64 203 default: panic("unsupported uint size") 204 } 205 } 206 207 func _OP_uintptr() _Op { 208 switch _PTR_SIZE { 209 case 32: return _OP_u32 210 case 64: return _OP_u64 211 default: panic("unsupported pointer size") 212 } 213 } 214 215 func _OP_map_key_int() _Op { 216 switch _INT_SIZE { 217 case 32: return _OP_map_key_i32 218 case 64: return _OP_map_key_i64 219 default: panic("unsupported int size") 220 } 221 } 222 223 func _OP_map_key_uint() _Op { 224 switch _INT_SIZE { 225 case 32: return _OP_map_key_u32 226 case 64: return _OP_map_key_u64 227 default: panic("unsupported uint size") 228 } 229 } 230 231 func _OP_map_key_uintptr() _Op { 232 switch _PTR_SIZE { 233 case 32: return _OP_map_key_u32 234 case 64: return _OP_map_key_u64 235 default: panic("unsupported pointer size") 236 } 237 } 238 239 type _Instr struct { 240 u uint64 // union {op: 8, vb: 8, vi: 48}, iv maybe int or len([]int) 241 p unsafe.Pointer // maybe GoSlice.Data, *GoType or *caching.FieldMap 242 } 243 244 func packOp(op _Op) uint64 { 245 return uint64(op) << 56 246 } 247 248 func newInsOp(op _Op) _Instr { 249 return _Instr{u: packOp(op)} 250 } 251 252 func newInsVi(op _Op, vi int) _Instr { 253 return _Instr{u: 
packOp(op) | rt.PackInt(vi)} 254 } 255 256 func newInsVb(op _Op, vb byte) _Instr { 257 return _Instr{u: packOp(op) | (uint64(vb) << 48)} 258 } 259 260 func newInsVs(op _Op, vs []int) _Instr { 261 return _Instr { 262 u: packOp(op) | rt.PackInt(len(vs)), 263 p: (*rt.GoSlice)(unsafe.Pointer(&vs)).Ptr, 264 } 265 } 266 267 func newInsVt(op _Op, vt reflect.Type) _Instr { 268 return _Instr { 269 u: packOp(op), 270 p: unsafe.Pointer(rt.UnpackType(vt)), 271 } 272 } 273 274 func newInsVf(op _Op, vf *caching.FieldMap) _Instr { 275 return _Instr { 276 u: packOp(op), 277 p: unsafe.Pointer(vf), 278 } 279 } 280 281 func (self _Instr) op() _Op { 282 return _Op(self.u >> 56) 283 } 284 285 func (self _Instr) vi() int { 286 return rt.UnpackInt(self.u) 287 } 288 289 func (self _Instr) vb() byte { 290 return byte(self.u >> 48) 291 } 292 293 func (self _Instr) vs() (v []int) { 294 (*rt.GoSlice)(unsafe.Pointer(&v)).Ptr = self.p 295 (*rt.GoSlice)(unsafe.Pointer(&v)).Cap = self.vi() 296 (*rt.GoSlice)(unsafe.Pointer(&v)).Len = self.vi() 297 return 298 } 299 300 func (self _Instr) vf() *caching.FieldMap { 301 return (*caching.FieldMap)(self.p) 302 } 303 304 func (self _Instr) vk() reflect.Kind { 305 return (*rt.GoType)(self.p).Kind() 306 } 307 308 func (self _Instr) vt() reflect.Type { 309 return (*rt.GoType)(self.p).Pack() 310 } 311 312 func (self _Instr) i64() int64 { 313 return int64(self.vi()) 314 } 315 316 func (self _Instr) vlen() int { 317 return int((*rt.GoType)(self.p).Size) 318 } 319 320 func (self _Instr) isBranch() bool { 321 switch self.op() { 322 case _OP_goto : fallthrough 323 case _OP_switch : fallthrough 324 case _OP_is_null : fallthrough 325 case _OP_is_null_quote : fallthrough 326 case _OP_check_char : return true 327 default : return false 328 } 329 } 330 331 func (self _Instr) disassemble() string { 332 switch self.op() { 333 case _OP_dyn : fallthrough 334 case _OP_deref : fallthrough 335 case _OP_map_key_i8 : fallthrough 336 case _OP_map_key_i16 : fallthrough 337 case 
_OP_map_key_i32 : fallthrough 338 case _OP_map_key_i64 : fallthrough 339 case _OP_map_key_u8 : fallthrough 340 case _OP_map_key_u16 : fallthrough 341 case _OP_map_key_u32 : fallthrough 342 case _OP_map_key_u64 : fallthrough 343 case _OP_map_key_f32 : fallthrough 344 case _OP_map_key_f64 : fallthrough 345 case _OP_map_key_str : fallthrough 346 case _OP_map_key_utext : fallthrough 347 case _OP_map_key_utext_p : fallthrough 348 case _OP_slice_init : fallthrough 349 case _OP_slice_append : fallthrough 350 case _OP_unmarshal : fallthrough 351 case _OP_unmarshal_p : fallthrough 352 case _OP_unmarshal_text : fallthrough 353 case _OP_unmarshal_text_p : fallthrough 354 case _OP_recurse : return fmt.Sprintf("%-18s%s", self.op(), self.vt()) 355 case _OP_goto : fallthrough 356 case _OP_is_null_quote : fallthrough 357 case _OP_is_null : return fmt.Sprintf("%-18sL_%d", self.op(), self.vi()) 358 case _OP_index : fallthrough 359 case _OP_array_clear : fallthrough 360 case _OP_array_clear_p : return fmt.Sprintf("%-18s%d", self.op(), self.vi()) 361 case _OP_switch : return fmt.Sprintf("%-18s%s", self.op(), self.formatSwitchLabels()) 362 case _OP_struct_field : return fmt.Sprintf("%-18s%s", self.op(), self.formatStructFields()) 363 case _OP_match_char : return fmt.Sprintf("%-18s%s", self.op(), strconv.QuoteRune(rune(self.vb()))) 364 case _OP_check_char : return fmt.Sprintf("%-18sL_%d, %s", self.op(), self.vi(), strconv.QuoteRune(rune(self.vb()))) 365 default : return self.op().String() 366 } 367 } 368 369 func (self _Instr) formatSwitchLabels() string { 370 var i int 371 var v int 372 var m []string 373 374 /* format each label */ 375 for i, v = range self.vs() { 376 m = append(m, fmt.Sprintf("%d=L_%d", i, v)) 377 } 378 379 /* join them with "," */ 380 return strings.Join(m, ", ") 381 } 382 383 func (self _Instr) formatStructFields() string { 384 var i uint64 385 var r []string 386 var m []struct{i int; n string} 387 388 /* extract all the fields */ 389 for i = 0; i < self.vf().N; 
i++ { 390 if v := self.vf().At(i); v.Hash != 0 { 391 m = append(m, struct{i int; n string}{i: v.ID, n: v.Name}) 392 } 393 } 394 395 /* sort by field name */ 396 sort.Slice(m, func(i, j int) bool { 397 return m[i].n < m[j].n 398 }) 399 400 /* format each field */ 401 for _, v := range m { 402 r = append(r, fmt.Sprintf("%s=%d", v.n, v.i)) 403 } 404 405 /* join them with "," */ 406 return strings.Join(r, ", ") 407 } 408 409 type ( 410 _Program []_Instr 411 ) 412 413 func (self _Program) pc() int { 414 return len(self) 415 } 416 417 func (self _Program) tag(n int) { 418 if n >= _MaxStack { 419 panic("type nesting too deep") 420 } 421 } 422 423 func (self _Program) pin(i int) { 424 v := &self[i] 425 v.u &= 0xffff000000000000 426 v.u |= rt.PackInt(self.pc()) 427 } 428 429 func (self _Program) rel(v []int) { 430 for _, i := range v { 431 self.pin(i) 432 } 433 } 434 435 func (self *_Program) add(op _Op) { 436 *self = append(*self, newInsOp(op)) 437 } 438 439 func (self *_Program) int(op _Op, vi int) { 440 *self = append(*self, newInsVi(op, vi)) 441 } 442 443 func (self *_Program) chr(op _Op, vb byte) { 444 *self = append(*self, newInsVb(op, vb)) 445 } 446 447 func (self *_Program) tab(op _Op, vs []int) { 448 *self = append(*self, newInsVs(op, vs)) 449 } 450 451 func (self *_Program) rtt(op _Op, vt reflect.Type) { 452 *self = append(*self, newInsVt(op, vt)) 453 } 454 455 func (self *_Program) fmv(op _Op, vf *caching.FieldMap) { 456 *self = append(*self, newInsVf(op, vf)) 457 } 458 459 func (self _Program) disassemble() string { 460 nb := len(self) 461 tab := make([]bool, nb + 1) 462 ret := make([]string, 0, nb + 1) 463 464 /* prescan to get all the labels */ 465 for _, ins := range self { 466 if ins.isBranch() { 467 if ins.op() != _OP_switch { 468 tab[ins.vi()] = true 469 } else { 470 for _, v := range ins.vs() { 471 tab[v] = true 472 } 473 } 474 } 475 } 476 477 /* disassemble each instruction */ 478 for i, ins := range self { 479 if !tab[i] { 480 ret = append(ret, "\t" + 
ins.disassemble()) 481 } else { 482 ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble())) 483 } 484 } 485 486 /* add the last label, if needed */ 487 if tab[nb] { 488 ret = append(ret, fmt.Sprintf("L_%d:", nb)) 489 } 490 491 /* add an "end" indicator, and join all the strings */ 492 return strings.Join(append(ret, "\tend"), "\n") 493 } 494 495 type _Compiler struct { 496 opts option.CompileOptions 497 tab map[reflect.Type]bool 498 rec map[reflect.Type]bool 499 } 500 501 func newCompiler() *_Compiler { 502 return &_Compiler { 503 opts: option.DefaultCompileOptions(), 504 tab: map[reflect.Type]bool{}, 505 rec: map[reflect.Type]bool{}, 506 } 507 } 508 509 func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler { 510 self.opts = opts 511 return self 512 } 513 514 func (self *_Compiler) rescue(ep *error) { 515 if val := recover(); val != nil { 516 if err, ok := val.(error); ok { 517 *ep = err 518 } else { 519 panic(val) 520 } 521 } 522 } 523 524 func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) { 525 defer self.rescue(&err) 526 self.compileOne(&ret, 0, vt) 527 return 528 } 529 530 func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool { 531 pt := reflect.PtrTo(vt) 532 533 /* check for `json.Unmarshaler` with pointer receiver */ 534 if pt.Implements(jsonUnmarshalerType) { 535 p.rtt(_OP_unmarshal_p, pt) 536 return true 537 } 538 539 /* check for `json.Unmarshaler` */ 540 if vt.Implements(jsonUnmarshalerType) { 541 p.add(_OP_lspace) 542 self.compileUnmarshalJson(p, vt) 543 return true 544 } 545 546 /* check for `encoding.TextMarshaler` with pointer receiver */ 547 if pt.Implements(encodingTextUnmarshalerType) { 548 p.add(_OP_lspace) 549 self.compileUnmarshalTextPtr(p, pt) 550 return true 551 } 552 553 /* check for `encoding.TextUnmarshaler` */ 554 if vt.Implements(encodingTextUnmarshalerType) { 555 p.add(_OP_lspace) 556 self.compileUnmarshalText(p, vt) 557 return true 558 } 559 return false 560 } 561 562 
// compileOne compiles a single value of type vt at stack depth sp, guarding
// against recursive type nesting via self.tab.
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
    /* check for recursive nesting */
    ok := self.tab[vt]
    if ok {
        p.rtt(_OP_recurse, vt)
        return
    }

    /* custom unmarshalers take priority over the built-in decoders */
    if self.checkMarshaler(p, vt) {
        return
    }

    /* enter the recursion */
    p.add(_OP_lspace)
    self.tab[vt] = true
    self.compileOps(p, sp, vt)
    delete(self.tab, vt)
}

// compileOps dispatches on the kind of vt to the matching compile routine.
func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Bool      : self.compilePrimitive (vt, p, _OP_bool)
        case reflect.Int       : self.compilePrimitive (vt, p, _OP_int())
        case reflect.Int8      : self.compilePrimitive (vt, p, _OP_i8)
        case reflect.Int16     : self.compilePrimitive (vt, p, _OP_i16)
        case reflect.Int32     : self.compilePrimitive (vt, p, _OP_i32)
        case reflect.Int64     : self.compilePrimitive (vt, p, _OP_i64)
        case reflect.Uint      : self.compilePrimitive (vt, p, _OP_uint())
        case reflect.Uint8     : self.compilePrimitive (vt, p, _OP_u8)
        case reflect.Uint16    : self.compilePrimitive (vt, p, _OP_u16)
        case reflect.Uint32    : self.compilePrimitive (vt, p, _OP_u32)
        case reflect.Uint64    : self.compilePrimitive (vt, p, _OP_u64)
        case reflect.Uintptr   : self.compilePrimitive (vt, p, _OP_uintptr())
        case reflect.Float32   : self.compilePrimitive (vt, p, _OP_f32)
        case reflect.Float64   : self.compilePrimitive (vt, p, _OP_f64)
        case reflect.String    : self.compileString    (p, vt)
        case reflect.Array     : self.compileArray     (p, sp, vt)
        case reflect.Interface : self.compileInterface (p, vt)
        case reflect.Map       : self.compileMap       (p, sp, vt)
        case reflect.Ptr       : self.compilePtr       (p, sp, vt)
        case reflect.Slice     : self.compileSlice     (p, sp, vt)
        case reflect.Struct    : self.compileStruct    (p, sp, vt)
        default                : panic                 (&json.UnmarshalTypeError{Type: vt})
    }
}

// compileMap selects the map-key opcode: encoding.TextUnmarshaler keys
// (pointer receiver first) take priority over the primitive key kinds.
func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
    if reflect.PtrTo(vt.Key()).Implements(encodingTextUnmarshalerType) {
        self.compileMapOp(p, sp, vt, _OP_map_key_utext_p)
    } else if vt.Key().Implements(encodingTextUnmarshalerType) {
        self.compileMapOp(p, sp, vt, _OP_map_key_utext)
    } else {
        self.compileMapUt(p, sp, vt)
    }
}

// compileMapUt dispatches on the map key kind to the matching map-key opcode.
func (self *_Compiler) compileMapUt(p *_Program, sp int, vt reflect.Type) {
    switch vt.Key().Kind() {
        case reflect.Int     : self.compileMapOp(p, sp, vt, _OP_map_key_int())
        case reflect.Int8    : self.compileMapOp(p, sp, vt, _OP_map_key_i8)
        case reflect.Int16   : self.compileMapOp(p, sp, vt, _OP_map_key_i16)
        case reflect.Int32   : self.compileMapOp(p, sp, vt, _OP_map_key_i32)
        case reflect.Int64   : self.compileMapOp(p, sp, vt, _OP_map_key_i64)
        case reflect.Uint    : self.compileMapOp(p, sp, vt, _OP_map_key_uint())
        case reflect.Uint8   : self.compileMapOp(p, sp, vt, _OP_map_key_u8)
        case reflect.Uint16  : self.compileMapOp(p, sp, vt, _OP_map_key_u16)
        case reflect.Uint32  : self.compileMapOp(p, sp, vt, _OP_map_key_u32)
        case reflect.Uint64  : self.compileMapOp(p, sp, vt, _OP_map_key_u64)
        case reflect.Uintptr : self.compileMapOp(p, sp, vt, _OP_map_key_uintptr())
        case reflect.Float32 : self.compileMapOp(p, sp, vt, _OP_map_key_f32)
        case reflect.Float64 : self.compileMapOp(p, sp, vt, _OP_map_key_f64)
        case reflect.String  : self.compileMapOp(p, sp, vt, _OP_map_key_str)
        default              : panic(&json.UnmarshalTypeError{Type: vt})
    }
}

// compileMapOp emits the full map-decoding loop: null check, '{' match,
// map_init, then a first-pair path and a remaining-pairs loop, each decoding
// a key with op and a value via compileOne.
func (self *_Compiler) compileMapOp(p *_Program, sp int, vt reflect.Type, op _Op) {
    i := p.pc()
    p.add(_OP_is_null)
    p.tag(sp + 1)
    skip := self.checkIfSkip(p, vt, '{')
    p.add(_OP_save)
    p.add(_OP_map_init)
    p.add(_OP_save)
    p.add(_OP_lspace)
    j := p.pc()
    p.chr(_OP_check_char, '}')    // empty object?
    p.chr(_OP_match_char, '"')
    skip2 := p.pc()
    p.rtt(op, vt)

    /* match the value separator */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp + 2, vt.Elem())
    p.pin(skip2)
    p.add(_OP_load)
    k0 := p.pc()                  // loop head for subsequent pairs
    p.add(_OP_lspace)
    k1 := p.pc()
    p.chr(_OP_check_char, '}')    // end of object?
    p.chr(_OP_match_char, ',')
    p.add(_OP_lspace)
    p.chr(_OP_match_char, '"')
    skip3 := p.pc()
    p.rtt(op, vt)

    /* match the value separator */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp + 2, vt.Elem())
    p.pin(skip3)
    p.add(_OP_load)
    p.int(_OP_goto, k0)
    p.pin(j)
    p.pin(k1)
    p.add(_OP_drop_2)             // drop the two saved states
    x := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_1)              // "null" input: set the map to nil
    p.pin(skip)
    p.pin(x)
}

// compilePtr dereferences nested pointers (checking for unmarshalers at
// each level) and compiles the element type, with a null branch that sets
// the pointer to nil.
func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)

    /* dereference all the way down */
    for et.Kind() == reflect.Ptr {
        if self.checkMarshaler(p, et) {
            return
        }
        et = et.Elem()
        p.rtt(_OP_deref, et)
    }

    /* check for recursive nesting */
    ok := self.tab[et]
    if ok {
        p.rtt(_OP_recurse, et)
    } else {
        /* enter the recursion */
        p.add(_OP_lspace)
        self.tab[et] = true

        /* not inline the pointer type
         * recursing the defined pointer type's elem will cause issue379.
         */
        self.compileOps(p, sp, et)
    }
    delete(self.tab, et)

    j := p.pc()
    p.add(_OP_goto)

    // set val pointer as nil
    p.pin(i)
    p.add(_OP_nil_1)

    // nothing todo
    p.pin(j)
}

// compileArray emits fixed-length array decoding: element-by-element with
// ']' checks, array_skip for extra input, and array_clear(_p) for the
// remaining unset elements.
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type) {
    x := p.pc()
    p.add(_OP_is_null)
    p.tag(sp)
    skip := self.checkIfSkip(p, vt, '[')

    p.add(_OP_save)
    p.add(_OP_lspace)
    v := []int{p.pc()}
    p.chr(_OP_check_char, ']')

    /* decode every item */
    for i := 1; i <= vt.Len(); i++ {
        self.compileOne(p, sp + 1, vt.Elem())
        p.add(_OP_load)
        p.int(_OP_index, i * int(vt.Elem().Size()))
        p.add(_OP_lspace)
        v = append(v, p.pc())
        p.chr(_OP_check_char, ']')
        p.chr(_OP_match_char, ',')
    }

    /* drop rest of the array */
    p.add(_OP_array_skip)
    w := p.pc()
    p.add(_OP_goto)
    p.rel(v)

    /* check for pointer data: pointer-free elements can be cleared
     * without telling the GC */
    if rt.UnpackType(vt.Elem()).PtrData == 0 {
        p.int(_OP_array_clear, int(vt.Size()))
    } else {
        p.int(_OP_array_clear_p, int(vt.Size()))
    }

    /* restore the stack */
    p.pin(w)
    p.add(_OP_drop)

    p.pin(skip)
    p.pin(x)
}

// compileSlice routes []byte to the base64 path, everything else to the
// generic list path.
func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
    if vt.Elem().Kind() == byteType.Kind() {
        self.compileSliceBin(p, sp, vt)
    } else {
        self.compileSliceList(p, sp, vt)
    }
}

// compileSliceBin emits byte-slice decoding: a '"'-quoted base64 string via
// _OP_bin, or a regular '['-delimited list of numbers as fallback.
func (self *_Compiler) compileSliceBin(p *_Program, sp int, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    j := p.pc()
    p.chr(_OP_check_char, '[')
    skip := self.checkIfSkip(p, vt, '"')
    k := p.pc()
    p.chr(_OP_check_char, '"')
    p.add(_OP_bin)
    x := p.pc()
    p.add(_OP_goto)
    p.pin(j)
    self.compileSliceBody(p, sp, vt.Elem())
    y := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.pin(k)
    p.add(_OP_nil_3)
    p.pin(x)
    p.pin(skip)
    p.pin(y)
}

// compileSliceList emits generic slice decoding with a null branch that
// sets the slice header to nil.
func (self *_Compiler) compileSliceList(p *_Program, sp int, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    p.tag(sp)
    skip := self.checkIfSkip(p, vt, '[')
    self.compileSliceBody(p, sp, vt.Elem())
    x := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_3)
    p.pin(x)
    p.pin(skip)
}

// compileSliceBody emits the element loop shared by both slice paths:
// slice_init, first element, then append/decode until ']'.
func (self *_Compiler) compileSliceBody(p *_Program, sp int, et reflect.Type) {
    p.add(_OP_lspace)
    j := p.pc()
    p.chr(_OP_check_empty, ']')
    p.rtt(_OP_slice_init, et)
    p.add(_OP_save)
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp + 1, et)
    p.add(_OP_load)
    k0 := p.pc()
    p.add(_OP_lspace)
    k1 := p.pc()
    p.chr(_OP_check_char, ']')
    p.chr(_OP_match_char, ',')
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp + 1, et)
    p.add(_OP_load)
    p.int(_OP_goto, k0)
    p.pin(k1)
    p.add(_OP_drop)
    p.pin(j)
}

// compileString routes json.Number to the numeric opcode, plain strings to
// the quoted-string body.
func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
    if vt == jsonNumberType {
        self.compilePrimitive(vt, p, _OP_num)
    } else {
        self.compileStringBody(vt, p)
    }
}

// compileStringBody emits null check, '"' match, then _OP_str.
func (self *_Compiler) compileStringBody(vt reflect.Type, p *_Program) {
    i := p.pc()
    p.add(_OP_is_null)
    skip := self.checkIfSkip(p, vt, '"')
    p.add(_OP_str)
    p.pin(i)
    p.pin(skip)
}

// compileStruct either inlines the struct body or falls back to _OP_recurse
// when the inline depth, program size, or field-count cutoffs are exceeded.
func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
        p.rtt(_OP_recurse, vt)
        if self.opts.RecursiveDepth > 0 {
            self.rec[vt] = true
        }
    } else {
        self.compileStructBody(p, sp, vt)
    }
}

// compileStructBody emits the full object-decoding loop for a struct:
// field-name lookup via a FieldMap, a switch jump table into per-field
// decoders, and object_next to skip unknown fields.
func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
    fv := resolver.ResolveStruct(vt)
    fm, sw := caching.CreateFieldMap(len(fv)), make([]int, len(fv))

    /* start of object */
    p.tag(sp)
    n := p.pc()
    p.add(_OP_is_null)

    skip := self.checkIfSkip(p, vt, '{')

    p.add(_OP_save)
    p.add(_OP_lspace)
    x := p.pc()
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)
    p.add(_OP_object_next)        // unknown field: skip its value
    y0 := p.pc()
    p.add(_OP_lspace)
    y1 := p.pc()
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, ',')

    /* special case of an empty struct */
    if len(fv) == 0 {
        p.add(_OP_object_skip)
        goto end_of_object
    }

    /* match the remaining fields */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)
    p.add(_OP_object_next)
    p.int(_OP_goto, y0)

    /* process each field */
    for i, f := range fv {
        sw[i] = p.pc()            // switch target for field i
        fm.Set(f.Name, i)

        /* index to the field */
        for _, o := range f.Path {
            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
                p.rtt(_OP_deref, o.Type)
            }
        }

        /* check for "stringnize" option */
        if (f.Opts & resolver.F_stringize) == 0 {
            self.compileOne(p, sp + 1, f.Type)
        } else {
            self.compileStructFieldStr(p, sp + 1, f.Type)
        }

        /* load the state, and try next field */
        p.add(_OP_load)
        p.int(_OP_goto, y0)
    }

end_of_object:
    p.pin(x)
    p.pin(y1)
    p.add(_OP_drop)
    p.pin(n)
    p.pin(skip)
}

// compileStructFieldStr compiles a field carrying the `,string` option:
// the value arrives quoted, so a '"' is matched around the primitive
// decode. Non-stringizable kinds fall back to the regular path.
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
    n1 := -1
    ft := vt
    sv := false

    /* dereference the pointer if needed */
    if ft.Kind() == reflect.Ptr {
        ft = ft.Elem()
    }

    /* check if it can be stringized */
    switch ft.Kind() {
        case reflect.Bool    : sv = true
        case reflect.Int     : sv = true
        case reflect.Int8    : sv = true
        case reflect.Int16   : sv = true
        case reflect.Int32   : sv = true
        case reflect.Int64   : sv = true
        case reflect.Uint    : sv = true
        case reflect.Uint8   : sv = true
        case reflect.Uint16  : sv = true
        case reflect.Uint32  : sv = true
        case reflect.Uint64  : sv = true
        case reflect.Uintptr : sv = true
        case reflect.Float32 : sv = true
        case reflect.Float64 : sv = true
        case reflect.String  : sv = true
    }

    /* if it's not, ignore the "string" and follow the regular path */
    if !sv {
        self.compileOne(p, sp, vt)
        return
    }

    /* remove the leading space, and match the leading quote */
    vk := vt.Kind()
    p.add(_OP_lspace)
    n0 := p.pc()
    p.add(_OP_is_null)

    skip := self.checkIfSkip(p, stringType, '"')

    /* also check for inner "null" */
    n1 = p.pc()
    p.add(_OP_is_null_quote)

    /* dereference the pointer only when it is not null */
    if vk == reflect.Ptr {
        vt = vt.Elem()
        p.rtt(_OP_deref, vt)
    }

    n2 := p.pc()
    p.chr(_OP_check_char_0, '"')

    /* string opcode selector */
    _OP_string := func() _Op {
        if ft == jsonNumberType {
            return _OP_num
        } else {
            return _OP_unquote
        }
    }

    /* compile for each type */
    switch vt.Kind() {
        case reflect.Bool    : p.add(_OP_bool)
        case reflect.Int     : p.add(_OP_int())
        case reflect.Int8    : p.add(_OP_i8)
        case reflect.Int16   : p.add(_OP_i16)
        case reflect.Int32   : p.add(_OP_i32)
        case reflect.Int64   : p.add(_OP_i64)
        case reflect.Uint    : p.add(_OP_uint())
        case reflect.Uint8   : p.add(_OP_u8)
        case reflect.Uint16  : p.add(_OP_u16)
        case reflect.Uint32  : p.add(_OP_u32)
        case reflect.Uint64  : p.add(_OP_u64)
        case reflect.Uintptr : p.add(_OP_uintptr())
        case reflect.Float32 : p.add(_OP_f32)
        case reflect.Float64 : p.add(_OP_f64)
        case reflect.String  : p.add(_OP_string())
        default              : panic("not reachable")
    }

    /* the closing quote is not needed when parsing a pure string */
    if vt == jsonNumberType || vt.Kind() != reflect.String {
        p.chr(_OP_match_char, '"')
    }

    /* pin the `is_null_quote` jump location */
    if n1 != -1 && vk != reflect.Ptr {
        p.pin(n1)
    }

    /* "null" but not a pointer, act as if the field is not present */
    if vk != reflect.Ptr {
        pc2 := p.pc()
        p.add(_OP_goto)
        p.pin(n2)
        p.rtt(_OP_dismatch_err, vt)
        p.int(_OP_add, 1)
        p.pin(pc2)
        p.pin(n0)
        return
    }

    /* the "null" case of the pointer */
    pc := p.pc()
    p.add(_OP_goto)
    p.pin(n0)   // `is_null` jump location
    p.pin(n1)   // `is_null_quote` jump location
    p.add(_OP_nil_1)
    pc2 := p.pc()
    p.add(_OP_goto)
    p.pin(n2)
    p.rtt(_OP_dismatch_err, vt)
    p.int(_OP_add, 1)
    p.pin(pc)
    p.pin(pc2)
    p.pin(skip)
}

// compileInterface emits _OP_any for empty interfaces or _OP_dyn for
// non-empty ones, with a null branch emitting nil_2.
func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)

    /* check for empty interface */
    if vt.NumMethod() == 0 {
        p.add(_OP_any)
    } else {
        p.rtt(_OP_dyn, vt)
    }

    /* finish the OpCode */
    j := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_2)
    p.pin(j)
}

// compilePrimitive emits a null check followed by the primitive decode op.
func (self *_Compiler) compilePrimitive(vt reflect.Type, p *_Program, op _Op) {
    i := p.pc()
    p.add(_OP_is_null)
    // skip := self.checkPrimitive(p, vt)
    p.add(op)
    p.pin(i)
    // p.pin(skip)
}

// compileUnmarshalEnd finishes an unmarshaler call: for pointer types, a
// "null" input sets the pointer to nil instead of invoking the unmarshaler.
func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) {
    j := p.pc()
    k := vt.Kind()

    /* not a pointer */
    if k != reflect.Ptr {
        p.pin(i)
        return
    }

    /* it seems that in Go JSON library, "null" takes priority over any kind of unmarshaler */
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_1)
    p.pin(j)
}

// compileUnmarshalJson emits a json.Unmarshaler call (or _OP_dyn for
// interface types).
func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
    i := p.pc()
    v := _OP_unmarshal
    p.add(_OP_is_null)

    /* check for dynamic interface */
    if vt.Kind() == reflect.Interface {
        v = _OP_dyn
    }

    /* call the unmarshaler */
    p.rtt(v, vt)
    self.compileUnmarshalEnd(p, vt, i)
}

// compileUnmarshalText emits an encoding.TextUnmarshaler call; non-interface
// types additionally match the opening '"' first.
func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
    i := p.pc()
    v := _OP_unmarshal_text
    p.add(_OP_is_null)

    /* check for dynamic interface */
    if vt.Kind() == reflect.Interface {
        v = _OP_dyn
    } else {
        p.chr(_OP_match_char, '"')
    }

    /* call the unmarshaler */
    p.rtt(v, vt)
    self.compileUnmarshalEnd(p, vt, i)
}

// compileUnmarshalTextPtr emits a pointer-receiver TextUnmarshaler call.
func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    p.chr(_OP_match_char, '"')
    p.rtt(_OP_unmarshal_text_p, vt)
    p.pin(i)
}

// checkIfSkip emits a check for the expected opening character c: on
// mismatch it records a dismatch error and skips the value; on match it
// advances past c. Returns the pc of the go_skip branch for later pinning.
func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
    j := p.pc()
    p.chr(_OP_check_char_0, c)
    p.rtt(_OP_dismatch_err, vt)
    s := p.pc()
    p.add(_OP_go_skip)
    p.pin(j)
    p.int(_OP_add, 1)   // consume the matched character
    return s
}