github.com/goshafaq/sonic@v0.0.0-20231026082336-871835fb94c6/internal/decoder/assembler_regabi_amd64.go (about)

     1  //go:build go1.17 && !go1.22
     2  // +build go1.17,!go1.22
     3  
     4  /*
     5   * Copyright 2021 ByteDance Inc.
     6   *
     7   * Licensed under the Apache License, Version 2.0 (the "License");
     8   * you may not use this file except in compliance with the License.
     9   * You may obtain a copy of the License at
    10   *
    11   *     http://www.apache.org/licenses/LICENSE-2.0
    12   *
    13   * Unless required by applicable law or agreed to in writing, software
    14   * distributed under the License is distributed on an "AS IS" BASIS,
    15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    16   * See the License for the specific language governing permissions and
    17   * limitations under the License.
    18   */
    19  
    20  package decoder
    21  
    22  import (
    23  	"encoding/json"
    24  	"fmt"
    25  	"math"
    26  	"reflect"
    27  	"unsafe"
    28  
    29  	"github.com/goshafaq/sonic/internal/caching"
    30  	"github.com/goshafaq/sonic/internal/jit"
    31  	"github.com/goshafaq/sonic/internal/native"
    32  	"github.com/goshafaq/sonic/internal/native/types"
    33  	"github.com/goshafaq/sonic/internal/rt"
    34  	"github.com/twitchyliquid64/golang-asm/obj"
    35  )
    36  
    37  /** Register Allocations
    38   *
    39   *  State Registers:
    40   *
    41   *      %r13 : stack base
    42   *      %r10 : input pointer
    43   *      %r12 : input length
    44   *      %r11 : input cursor
    45   *      %r15 : value pointer
    46   *
    47   *  Error Registers:
    48   *
    49   *      %rax : error type register
    50   *      %rbx : error pointer register
    51   */
    52  
    53  /** Function Prototype & Stack Map
    54   *
    55   *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
    56   *
    57   *  s.buf  :   (FP)
    58   *  s.len  :  8(FP)
    59   *  ic     : 16(FP)
    60   *  vp     : 24(FP)
    61   *  sb     : 32(FP)
    62   *  fv     : 40(FP)
    63   *  sv     : 56(FP)
    64   *  err.vt : 72(FP)
    65   *  err.vp : 80(FP)
    66   */
    67  
// Stack-frame area sizes (bytes). Together they define the layout used by
// prologue/epilogue and all the _ARG_*/_VAR_* slot offsets below.
const (
	_FP_args   = 72  // 72 bytes to pass and spill register arguments
	_FP_fargs  = 80  // 80 bytes for passing arguments to other Go functions
	_FP_saves  = 48  // 48 bytes for saving the registers before CALL instructions
	_FP_locals = 144 // 144 bytes for local variables
)

// Derived frame offsets: locals are followed by the saved frame pointer,
// then the return address pushed by CALL.
const (
	_FP_offs = _FP_fargs + _FP_saves + _FP_locals
	_FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
	_FP_base = _FP_size + 8 // 8 bytes for the return address
)

// Little-endian 32-bit immediates used to match literal tokens in the input.
const (
	_IM_null = 0x6c6c756e // 'null'
	_IM_true = 0x65757274 // 'true'
	_IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')
)

// Bitmask of ASCII whitespace: bit n is set when character code n is a space.
const (
	_BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)

const (
	_MODE_JSON = 1 << 3 // base64 mode
)

// Label names for the shared error/exit stubs emitted once per program.
const (
	_LB_error           = "_error"
	_LB_im_error        = "_im_error"
	_LB_eof_error       = "_eof_error"
	_LB_type_error      = "_type_error"
	_LB_field_error     = "_field_error"
	_LB_range_error     = "_range_error"
	_LB_stack_error     = "_stack_error"
	_LB_base64_error    = "_base64_error"
	_LB_unquote_error   = "_unquote_error"
	_LB_parsing_error   = "_parsing_error"
	_LB_parsing_error_v = "_parsing_error_v"
	_LB_mismatch_error  = "_mismatch_error"
)

// Labels reporting an invalid character at an offset relative to the cursor
// (e.g. _char_2_error points two bytes past the current position, m2/m3
// point before it).
const (
	_LB_char_0_error  = "_char_0_error"
	_LB_char_1_error  = "_char_1_error"
	_LB_char_2_error  = "_char_2_error"
	_LB_char_3_error  = "_char_3_error"
	_LB_char_4_error  = "_char_4_error"
	_LB_char_m2_error = "_char_m2_error"
	_LB_char_m3_error = "_char_m3_error"
)

// Labels of the shared value-skipping subroutines.
const (
	_LB_skip_one       = "_skip_one"
	_LB_skip_key_value = "_skip_key_value"
)
   124  
// General-purpose and SSE scratch registers referenced by the emitters.
var (
	_AX = jit.Reg("AX")
	_BX = jit.Reg("BX")
	_CX = jit.Reg("CX")
	_DX = jit.Reg("DX")
	_DI = jit.Reg("DI")
	_SI = jit.Reg("SI")
	_BP = jit.Reg("BP")
	_SP = jit.Reg("SP")
	_R8 = jit.Reg("R8")
	_R9 = jit.Reg("R9")
	_X0 = jit.Reg("X0")
	_X1 = jit.Reg("X1")
)

// State registers — see the register-allocation table at the top of the file.
var (
	_IP = jit.Reg("R10") // input pointer; saved on BP when callc
	_IC = jit.Reg("R11") // input cursor; saved on BX when call_c
	_IL = jit.Reg("R12") // input length
	_ST = jit.Reg("R13") // stack base
	_VP = jit.Reg("R15") // value pointer
)

// Error registers — alias AX/BX, only meaningful on the error paths.
var (
	_DF = jit.Reg("AX") // reuse AX in generic decoder for flags
	_ET = jit.Reg("AX") // error type register
	_EP = jit.Reg("BX") // error pointer register
)

// Spill slots for the register arguments (see the stack map above).
// NOTE: _ARG_s forward-references _ARG_sp; package-level initializers may
// refer to later declarations, so this is legal Go.
var (
	_ARG_s  = _ARG_sp
	_ARG_sp = jit.Ptr(_SP, _FP_base+0)
	_ARG_sl = jit.Ptr(_SP, _FP_base+8)
	_ARG_ic = jit.Ptr(_SP, _FP_base+16)
	_ARG_vp = jit.Ptr(_SP, _FP_base+24)
	_ARG_sb = jit.Ptr(_SP, _FP_base+32)
	_ARG_fv = jit.Ptr(_SP, _FP_base+40)
)

// sv is a string header {ptr, len}; _ARG_sv aliases its pointer half.
var (
	_ARG_sv   = _ARG_sv_p
	_ARG_sv_p = jit.Ptr(_SP, _FP_base+48)
	_ARG_sv_n = jit.Ptr(_SP, _FP_base+56)
	_ARG_vk   = jit.Ptr(_SP, _FP_base+64)
)

// Local-variable slots, laid out within the _FP_locals area.
// _VAR_st aliases the first field of the native parser state below.
var (
	_VAR_st = _VAR_st_Vt
	_VAR_sr = jit.Ptr(_SP, _FP_fargs+_FP_saves)
)

// Spilled native parser state (value type, double, integer, error position,
// digit buffer pointer and capacity).
var (
	_VAR_st_Vt = jit.Ptr(_SP, _FP_fargs+_FP_saves+0)
	_VAR_st_Dv = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
	_VAR_st_Iv = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
	_VAR_st_Ep = jit.Ptr(_SP, _FP_fargs+_FP_saves+24)
	_VAR_st_Db = jit.Ptr(_SP, _FP_fargs+_FP_saves+32)
	_VAR_st_Dc = jit.Ptr(_SP, _FP_fargs+_FP_saves+40)
)

// Scratch slots used by the string-unquoting helpers to preserve registers.
var (
	_VAR_ss_AX = jit.Ptr(_SP, _FP_fargs+_FP_saves+48)
	_VAR_ss_CX = jit.Ptr(_SP, _FP_fargs+_FP_saves+56)
	_VAR_ss_SI = jit.Ptr(_SP, _FP_fargs+_FP_saves+64)
	_VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs+_FP_saves+72)
	_VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs+_FP_saves+80)
)

// Saved {ptr, len, link-register} for the copy_string/escape_string stubs.
var (
	_VAR_bs_p  = jit.Ptr(_SP, _FP_fargs+_FP_saves+88)
	_VAR_bs_n  = jit.Ptr(_SP, _FP_fargs+_FP_saves+96)
	_VAR_bs_LR = jit.Ptr(_SP, _FP_fargs+_FP_saves+104)
)

// Flag spill slot for the generic decoder.
var _VAR_fl = jit.Ptr(_SP, _FP_fargs+_FP_saves+112)

var (
	_VAR_et = jit.Ptr(_SP, _FP_fargs+_FP_saves+120) // save dismatched type
	_VAR_pc = jit.Ptr(_SP, _FP_fargs+_FP_saves+128) // save skip return pc
	_VAR_ic = jit.Ptr(_SP, _FP_fargs+_FP_saves+136) // save dismatched position
)
   206  
// _Assembler translates a decoder _Program into executable machine code via
// the embedded jit.BaseAssembler.
type _Assembler struct {
	jit.BaseAssembler
	p    _Program // instruction sequence to compile
	name string   // suffix for the generated function's name ("decode_"+name)
}
   212  
   213  func newAssembler(p _Program) *_Assembler {
   214  	return new(_Assembler).Init(p)
   215  }
   216  
   217  /** Assembler Interface **/
   218  
// Load assembles the program and returns the resulting machine code as a
// callable _Decoder, registering the frame layout and pointer maps with the
// runtime.
func (self *_Assembler) Load() _Decoder {
	return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
   222  
// Init stores the program and registers self.compile as the code-generation
// callback; it returns self for chaining.
func (self *_Assembler) Init(p _Program) *_Assembler {
	self.p = p
	self.BaseAssembler.Init(self.compile)
	return self
}
   228  
// compile emits the whole function: prologue, the translated instruction
// stream, the epilogue, and then every shared out-of-line stub (string
// helpers, skip subroutines, and error handlers) exactly once.
func (self *_Assembler) compile() {
	self.prologue()
	self.instrs()
	self.epilogue()
	self.copy_string()
	self.escape_string()
	self.escape_string_twice()
	self.skip_one()
	self.skip_key_value()
	self.type_error()
	self.mismatch_error()
	self.field_error()
	self.range_error()
	self.stack_error()
	self.base64_error()
	self.parsing_error()
}
   246  
   247  /** Assembler Stages **/
   248  
// _OpFuncTab maps each opcode to its code-emitting method; unassigned
// opcodes stay nil and are rejected by instr().
var _OpFuncTab = [256]func(*_Assembler, *_Instr){
	_OP_any:              (*_Assembler)._asm_OP_any,
	_OP_dyn:              (*_Assembler)._asm_OP_dyn,
	_OP_str:              (*_Assembler)._asm_OP_str,
	_OP_bin:              (*_Assembler)._asm_OP_bin,
	_OP_bool:             (*_Assembler)._asm_OP_bool,
	_OP_num:              (*_Assembler)._asm_OP_num,
	_OP_i8:               (*_Assembler)._asm_OP_i8,
	_OP_i16:              (*_Assembler)._asm_OP_i16,
	_OP_i32:              (*_Assembler)._asm_OP_i32,
	_OP_i64:              (*_Assembler)._asm_OP_i64,
	_OP_u8:               (*_Assembler)._asm_OP_u8,
	_OP_u16:              (*_Assembler)._asm_OP_u16,
	_OP_u32:              (*_Assembler)._asm_OP_u32,
	_OP_u64:              (*_Assembler)._asm_OP_u64,
	_OP_f32:              (*_Assembler)._asm_OP_f32,
	_OP_f64:              (*_Assembler)._asm_OP_f64,
	_OP_unquote:          (*_Assembler)._asm_OP_unquote,
	_OP_nil_1:            (*_Assembler)._asm_OP_nil_1,
	_OP_nil_2:            (*_Assembler)._asm_OP_nil_2,
	_OP_nil_3:            (*_Assembler)._asm_OP_nil_3,
	_OP_deref:            (*_Assembler)._asm_OP_deref,
	_OP_index:            (*_Assembler)._asm_OP_index,
	_OP_is_null:          (*_Assembler)._asm_OP_is_null,
	_OP_is_null_quote:    (*_Assembler)._asm_OP_is_null_quote,
	_OP_map_init:         (*_Assembler)._asm_OP_map_init,
	_OP_map_key_i8:       (*_Assembler)._asm_OP_map_key_i8,
	_OP_map_key_i16:      (*_Assembler)._asm_OP_map_key_i16,
	_OP_map_key_i32:      (*_Assembler)._asm_OP_map_key_i32,
	_OP_map_key_i64:      (*_Assembler)._asm_OP_map_key_i64,
	_OP_map_key_u8:       (*_Assembler)._asm_OP_map_key_u8,
	_OP_map_key_u16:      (*_Assembler)._asm_OP_map_key_u16,
	_OP_map_key_u32:      (*_Assembler)._asm_OP_map_key_u32,
	_OP_map_key_u64:      (*_Assembler)._asm_OP_map_key_u64,
	_OP_map_key_f32:      (*_Assembler)._asm_OP_map_key_f32,
	_OP_map_key_f64:      (*_Assembler)._asm_OP_map_key_f64,
	_OP_map_key_str:      (*_Assembler)._asm_OP_map_key_str,
	_OP_map_key_utext:    (*_Assembler)._asm_OP_map_key_utext,
	_OP_map_key_utext_p:  (*_Assembler)._asm_OP_map_key_utext_p,
	_OP_array_skip:       (*_Assembler)._asm_OP_array_skip,
	_OP_array_clear:      (*_Assembler)._asm_OP_array_clear,
	_OP_array_clear_p:    (*_Assembler)._asm_OP_array_clear_p,
	_OP_slice_init:       (*_Assembler)._asm_OP_slice_init,
	_OP_slice_append:     (*_Assembler)._asm_OP_slice_append,
	_OP_object_skip:      (*_Assembler)._asm_OP_object_skip,
	_OP_object_next:      (*_Assembler)._asm_OP_object_next,
	_OP_struct_field:     (*_Assembler)._asm_OP_struct_field,
	_OP_unmarshal:        (*_Assembler)._asm_OP_unmarshal,
	_OP_unmarshal_p:      (*_Assembler)._asm_OP_unmarshal_p,
	_OP_unmarshal_text:   (*_Assembler)._asm_OP_unmarshal_text,
	_OP_unmarshal_text_p: (*_Assembler)._asm_OP_unmarshal_text_p,
	_OP_lspace:           (*_Assembler)._asm_OP_lspace,
	_OP_match_char:       (*_Assembler)._asm_OP_match_char,
	_OP_check_char:       (*_Assembler)._asm_OP_check_char,
	_OP_load:             (*_Assembler)._asm_OP_load,
	_OP_save:             (*_Assembler)._asm_OP_save,
	_OP_drop:             (*_Assembler)._asm_OP_drop,
	_OP_drop_2:           (*_Assembler)._asm_OP_drop_2,
	_OP_recurse:          (*_Assembler)._asm_OP_recurse,
	_OP_goto:             (*_Assembler)._asm_OP_goto,
	_OP_switch:           (*_Assembler)._asm_OP_switch,
	_OP_check_char_0:     (*_Assembler)._asm_OP_check_char_0,
	_OP_dismatch_err:     (*_Assembler)._asm_OP_dismatch_err,
	_OP_go_skip:          (*_Assembler)._asm_OP_go_skip,
	_OP_add:              (*_Assembler)._asm_OP_add,
	_OP_check_empty:      (*_Assembler)._asm_OP_check_empty,
	_OP_debug:            (*_Assembler)._asm_OP_debug,
}
   317  
// _asm_OP_debug emits a single INT3 (0xcc) breakpoint instruction so a
// debugger traps at this point in the generated code.
func (self *_Assembler) _asm_OP_debug(_ *_Instr) {
	self.Byte(0xcc)
}
   321  
   322  func (self *_Assembler) instr(v *_Instr) {
   323  	if fn := _OpFuncTab[v.op()]; fn != nil {
   324  		fn(self, v)
   325  	} else {
   326  		panic(fmt.Sprintf("invalid opcode: %d", v.op()))
   327  	}
   328  }
   329  
// instrs emits every instruction of the program, marking each index as a
// branch target first so jumps can resolve to instruction boundaries.
// Taking &v of the loop variable is safe here: it is only used within the
// same iteration.
func (self *_Assembler) instrs() {
	for i, v := range self.p {
		self.Mark(i)
		self.instr(&v)
		self.debug_instr(i, &v)
	}
}
   337  
// epilogue emits the function exit: on normal fall-through it clears the
// error registers (routing any recorded mismatch to its handler first), and
// at _error it moves (ET, EP, IC) into the ABI result registers (AX, BX, CX),
// zeroes the pointer-holding spill slots so stale pointers are not kept live
// for the GC, restores BP and SP, and returns.
func (self *_Assembler) epilogue() {
	self.Mark(len(self.p))
	self.Emit("XORL", _EP, _EP)                    // XORL EP, EP
	self.Emit("MOVQ", _VAR_et, _ET)                // MOVQ VAR_et, ET
	self.Emit("TESTQ", _ET, _ET)                   // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_mismatch_error)           // JNZ _LB_mismatch_error
	self.Link(_LB_error)                           // _error:
	self.Emit("MOVQ", _EP, _CX)                    // MOVQ BX, CX
	self.Emit("MOVQ", _ET, _BX)                    // MOVQ AX, BX
	self.Emit("MOVQ", _IC, _AX)                    // MOVQ IC, AX
	self.Emit("MOVQ", jit.Imm(0), _ARG_sp)         // MOVQ $0, s.p<>+0(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_vp)         // MOVQ $0, vp<>+24(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)       // MOVQ $0, sv.p<>+48(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_vk)         // MOVQ $0, vk<>+64(FP)
	self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
	self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
	self.Emit("RET")                               // RET
}
   356  
// prologue emits the function entry: it allocates the frame, saves BP,
// spills the register arguments (AX..R8 per the register ABI) into their
// stack slots, loads the state registers (IP/IL/IC/VP/ST), zeroes the
// pointer spill slots and the mismatch-type slot, and points the digit
// buffer at the scratch area inside the decoder stack.
func (self *_Assembler) prologue() {
	self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
	self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
	self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
	self.Emit("MOVQ", _AX, _ARG_sp)                // MOVQ AX, s.p<>+0(FP)
	self.Emit("MOVQ", _AX, _IP)                    // MOVQ AX, IP
	self.Emit("MOVQ", _BX, _ARG_sl)                // MOVQ BX, s.l<>+8(FP)
	self.Emit("MOVQ", _BX, _IL)                    // MOVQ BX, IL
	self.Emit("MOVQ", _CX, _ARG_ic)                // MOVQ CX, ic<>+16(FP)
	self.Emit("MOVQ", _CX, _IC)                    // MOVQ CX, IC
	self.Emit("MOVQ", _DI, _ARG_vp)                // MOVQ DI, vp<>+24(FP)
	self.Emit("MOVQ", _DI, _VP)                    // MOVQ DI, VP
	self.Emit("MOVQ", _SI, _ARG_sb)                // MOVQ SI, sb<>+32(FP)
	self.Emit("MOVQ", _SI, _ST)                    // MOVQ SI, ST
	self.Emit("MOVQ", _R8, _ARG_fv)                // MOVQ R8, fv<>+40(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)       // MOVQ $0, sv.p<>+48(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_sv_n)       // MOVQ $0, sv.n<>+56(FP)
	self.Emit("MOVQ", jit.Imm(0), _ARG_vk)         // MOVQ $0, vk<>+64(FP)
	self.Emit("MOVQ", jit.Imm(0), _VAR_et)         // MOVQ $0, et<>+120(FP)
	// initialize digital buffer first
	self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
	self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)     // LEAQ _DbufOffset(ST), AX
	self.Emit("MOVQ", _AX, _VAR_st_Db)                    // MOVQ AX, ss.Dbuf
}
   381  
   382  /** Function Calling Helpers **/
   383  
// Register sets preserved across calls into Go code (_REG_go) and into the
// runtime (_REG_rt).
// NOTE(review): _REG_rt lists _IL twice ({..., _IC, _IL}); save/load spill
// by index so the duplicate is harmless but looks unintentional — confirm
// against upstream whether the last entry should be a different register.
var (
	_REG_go = []obj.Addr{_ST, _VP, _IP, _IL, _IC}
	_REG_rt = []obj.Addr{_ST, _VP, _IP, _IL, _IC, _IL}
)
   388  
   389  func (self *_Assembler) save(r ...obj.Addr) {
   390  	for i, v := range r {
   391  		if i > _FP_saves/8-1 {
   392  			panic("too many registers to save")
   393  		} else {
   394  			self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
   395  		}
   396  	}
   397  }
   398  
   399  func (self *_Assembler) load(r ...obj.Addr) {
   400  	for i, v := range r {
   401  		if i > _FP_saves/8-1 {
   402  			panic("too many registers to load")
   403  		} else {
   404  			self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
   405  		}
   406  	}
   407  }
   408  
// call emits an indirect call to fn through R9 (R9 is caller-saved and not
// one of the state registers).
func (self *_Assembler) call(fn obj.Addr) {
	self.Emit("MOVQ", fn, _R9) // MOVQ ${fn}, R9
	self.Rjmp("CALL", _R9)     // CALL R9
}
   413  
// call_go calls a Go function, spilling and restoring all state registers
// around the call since Go code may clobber them.
func (self *_Assembler) call_go(fn obj.Addr) {
	self.save(_REG_go...) // SAVE $REG_go
	self.call(fn)
	self.load(_REG_go...) // LOAD $REG_go
}
   419  
// callc calls a C (native) function, preserving only the input pointer;
// the native ABI used here leaves the other state registers intact.
func (self *_Assembler) callc(fn obj.Addr) {
	self.save(_IP)
	self.call(fn)
	self.load(_IP)
}
   425  
// call_c calls a C function that clobbers R11 (our IC register): IC is
// parked in BX for the duration of the call and swapped back afterwards.
func (self *_Assembler) call_c(fn obj.Addr) {
	self.Emit("XCHGQ", _IC, _BX)
	self.callc(fn)
	self.Emit("XCHGQ", _IC, _BX)
}
   431  
// call_sf calls a native skip-family function with the signature
// (s *string, ic *int, fsm *StateMachine, fv uint64): it materializes
// pointers to the spilled string and cursor, passes the FSM embedded in the
// decoder stack, and reloads IC from the slot the callee updated.
func (self *_Assembler) call_sf(fn obj.Addr) {
	self.Emit("LEAQ", _ARG_s, _DI)                   // LEAQ s<>+0(FP), DI
	self.Emit("MOVQ", _IC, _ARG_ic)                  // MOVQ IC, ic<>+16(FP)
	self.Emit("LEAQ", _ARG_ic, _SI)                  // LEAQ ic<>+16(FP), SI
	self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX
	self.Emit("MOVQ", _ARG_fv, _CX)
	self.callc(fn)
	self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}
   441  
// call_vf calls a native value-parsing function with the signature
// (s *string, ic *int, st *JsonState): like call_sf but passing the spilled
// parser-state struct instead of the FSM; IC is reloaded afterwards.
func (self *_Assembler) call_vf(fn obj.Addr) {
	self.Emit("LEAQ", _ARG_s, _DI)  // LEAQ s<>+0(FP), DI
	self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
	self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
	self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX
	self.callc(fn)
	self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}
   450  
   451  /** Assembler Error Handlers **/
   452  
// Addresses of the Go error-constructor helpers called from the stubs below.
var (
	_F_convT64        = jit.Func(convT64)
	_F_error_wrap     = jit.Func(error_wrap)
	_F_error_type     = jit.Func(error_type)
	_F_error_field    = jit.Func(error_field)
	_F_error_value    = jit.Func(error_value)
	_F_error_mismatch = jit.Func(error_mismatch)
)

// Interface/type pairs for the primitive types reported in range errors.
var (
	_I_int8, _T_int8       = rtype(reflect.TypeOf(int8(0)))
	_I_int16, _T_int16     = rtype(reflect.TypeOf(int16(0)))
	_I_int32, _T_int32     = rtype(reflect.TypeOf(int32(0)))
	_I_uint8, _T_uint8     = rtype(reflect.TypeOf(uint8(0)))
	_I_uint16, _T_uint16   = rtype(reflect.TypeOf(uint16(0)))
	_I_uint32, _T_uint32   = rtype(reflect.TypeOf(uint32(0)))
	_I_float32, _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

// itab for base64.CorruptInputError as the error interface.
var (
	_T_error                    = rt.UnpackType(errorType)
	_I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

// Pre-built error values/itabs loaded as immediates by the error stubs.
var (
	_V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
	_I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
	_I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)
   482  
// type_error emits the _type_error stub: build the type error in Go code,
// then jump to the common exit.
func (self *_Assembler) type_error() {
	self.Link(_LB_type_error)   // _type_error:
	self.call_go(_F_error_type) // CALL_GO error_type
	self.Sjmp("JMP", _LB_error) // JMP     _error
}
   488  
// mismatch_error emits the _mismatch_error stub. If _VAR_et already holds a
// fully-built MismatchTypeError (its itab), exit directly; otherwise call
// error_mismatch(src, len, pos, type) to construct one.
func (self *_Assembler) mismatch_error() {
	self.Link(_LB_mismatch_error)                     // _mismatch_error:
	self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
	self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
	self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
	self.Emit("CMPQ", _ET, _CX)                       // CMPQ ET, CX
	self.Sjmp("JE", _LB_error)                        // JE _LB_error
	self.Emit("MOVQ", _ARG_sp, _AX)
	self.Emit("MOVQ", _ARG_sl, _BX)
	self.Emit("MOVQ", _VAR_ic, _CX)
	self.Emit("MOVQ", _VAR_et, _DI)
	self.call_go(_F_error_mismatch) // CALL_GO error_mismatch
	self.Sjmp("JMP", _LB_error)     // JMP     _error
}
   503  
// field_error emits the _field_error stub: pass the offending field name
// (saved in the sv slots) to error_field, then exit.
func (self *_Assembler) field_error() {
	self.Link(_LB_field_error)        // _field_error:
	self.Emit("MOVQ", _ARG_sv_p, _AX) // MOVQ   sv.p, AX
	self.Emit("MOVQ", _ARG_sv_n, _BX) // MOVQ   sv.n, BX
	self.call_go(_F_error_field)      // CALL_GO error_field
	self.Sjmp("JMP", _LB_error)       // JMP     _error
}
   511  
// range_error emits the _range_error stub: slice out the offending numeric
// token (from the error position in the parser state) and pass it with the
// target type to error_value.
func (self *_Assembler) range_error() {
	self.Link(_LB_range_error)     // _range_error:
	self.Emit("MOVQ", _ET, _CX)    // MOVQ    ET, CX
	self.slice_from(_VAR_st_Ep, 0) // SLICE   st.Ep, $0
	self.Emit("MOVQ", _DI, _AX)    // MOVQ    DI, AX
	self.Emit("MOVQ", _EP, _DI)    // MOVQ    EP, DI
	self.Emit("MOVQ", _SI, _BX)    // MOVQ    SI, BX
	self.call_go(_F_error_value)   // CALL_GO error_value
	self.Sjmp("JMP", _LB_error)    // JMP     _error
}
   522  
// stack_error emits the _stack_error stub: load the pre-built stackOverflow
// error value and its UnsupportedValueError itab, then exit.
func (self *_Assembler) stack_error() {
	self.Link(_LB_stack_error)                            // _stack_error:
	self.Emit("MOVQ", _V_stackOverflow, _EP)              // MOVQ ${_V_stackOverflow}, EP
	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET
	self.Sjmp("JMP", _LB_error)                           // JMP  _error
}
   529  
// base64_error emits the _base64_error stub. The native decoder returns
// -(pos+1) on corrupt input; recover pos via NEG/SUB, box it with convT64
// (base64.CorruptInputError is an int64), and exit with its itab in ET.
func (self *_Assembler) base64_error() {
	self.Link(_LB_base64_error)
	self.Emit("NEGQ", _AX)                              // NEGQ    AX
	self.Emit("SUBQ", jit.Imm(1), _AX)                  // SUBQ    $1, AX
	self.call_go(_F_convT64)                            // CALL_GO convT64
	self.Emit("MOVQ", _AX, _EP)                         // MOVQ    AX, EP
	self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ    ${itab(base64.CorruptInputError)}, ET
	self.Sjmp("JMP", _LB_error)                         // JMP     _error
}
   539  
// parsing_error emits the chain of parse-failure stubs. The _char_N_error
// labels fall through, each adding 1 to IC, so entering at _char_K_error
// advances the cursor by K before reporting ERR_INVALID_CHAR; likewise
// _char_m3/_char_m2 back the cursor up by 3/2 via fall-through. _im_error
// compares the expected immediate byte-by-byte against the input to locate
// which character of a literal token mismatched. All paths converge at
// _parsing_error, which calls error_wrap(src, len, pos, code).
func (self *_Assembler) parsing_error() {
	self.Link(_LB_eof_error)                                       // _eof_error:
	self.Emit("MOVQ", _IL, _IC)                                    // MOVQ    IL, IC
	self.Emit("MOVL", jit.Imm(int64(types.ERR_EOF)), _EP)          // MOVL    ${types.ERR_EOF}, EP
	self.Sjmp("JMP", _LB_parsing_error)                            // JMP     _parsing_error
	self.Link(_LB_unquote_error)                                   // _unquote_error:
	self.Emit("SUBQ", _VAR_sr, _SI)                                // SUBQ    sr, SI
	self.Emit("SUBQ", _SI, _IC)                                    // SUBQ    SI, IC
	self.Link(_LB_parsing_error_v)                                 // _parsing_error_v:
	self.Emit("MOVQ", _AX, _EP)                                    // MOVQ    AX, EP
	self.Emit("NEGQ", _EP)                                         // NEGQ    EP (native code returns -errno)
	self.Sjmp("JMP", _LB_parsing_error)                            // JMP     _parsing_error
	self.Link(_LB_char_m3_error)                                   // _char_m3_error:
	self.Emit("SUBQ", jit.Imm(1), _IC)                             // SUBQ    $1, IC
	self.Link(_LB_char_m2_error)                                   // _char_m2_error: (fall-through: total -3 from m3)
	self.Emit("SUBQ", jit.Imm(2), _IC)                             // SUBQ    $2, IC
	self.Sjmp("JMP", _LB_char_0_error)                             // JMP     _char_0_error
	self.Link(_LB_im_error)                                        // _im_error:
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 0))                // CMPB    CX, (IP)(IC)
	self.Sjmp("JNE", _LB_char_0_error)                             // JNE     _char_0_error
	self.Emit("SHRL", jit.Imm(8), _CX)                             // SHRL    $8, CX
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 1))                // CMPB    CX, 1(IP)(IC)
	self.Sjmp("JNE", _LB_char_1_error)                             // JNE     _char_1_error
	self.Emit("SHRL", jit.Imm(8), _CX)                             // SHRL    $8, CX
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 2))                // CMPB    CX, 2(IP)(IC)
	self.Sjmp("JNE", _LB_char_2_error)                             // JNE     _char_2_error
	self.Sjmp("JMP", _LB_char_3_error)                             // JMP     _char_3_error
	self.Link(_LB_char_4_error)                                    // _char_4_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_3_error)                                    // _char_3_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_2_error)                                    // _char_2_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_1_error)                                    // _char_1_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_0_error)                                    // _char_0_error:
	self.Emit("MOVL", jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL    ${types.ERR_INVALID_CHAR}, EP
	self.Link(_LB_parsing_error)                                   // _parsing_error:
	self.Emit("MOVQ", _EP, _DI)                                    // MOVQ    EP, DI
	self.Emit("MOVQ", _ARG_sp, _AX)                                // MOVQ  sp, AX
	self.Emit("MOVQ", _ARG_sl, _BX)                                // MOVQ  sl, BX
	self.Emit("MOVQ", _IC, _CX)                                    // MOVQ    IC, CX
	self.call_go(_F_error_wrap)                                    // CALL_GO error_wrap
	self.Sjmp("JMP", _LB_error)                                    // JMP     _error
}
   585  
// _asm_OP_dismatch_err records a type mismatch without aborting: it saves
// the current cursor and the expected Go type into _VAR_ic/_VAR_et, which
// the epilogue checks to raise a deferred mismatch error.
func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
	self.Emit("MOVQ", _IC, _VAR_ic)
	self.Emit("MOVQ", jit.Type(p.vt()), _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
}
   591  
// _asm_OP_go_skip jumps to the shared _skip_one subroutine, first stashing
// the return address (a raw LEAQ off RIP, relocated via Xref to the target
// instruction p.vi()) into _VAR_pc so _skip_one can jump back.
func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Xref(p.vi(), 4)
	// self.Byte(0xcc)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one) // JMP     _skip_one
}
   599  
// skip_one emits the _skip_one subroutine: restore the cursor from _VAR_ic,
// skip a whole JSON value with the native skipper (negative AX means a parse
// error), then jump back through the caller-saved address in _VAR_pc.
func (self *_Assembler) skip_one() {
	self.Link(_LB_skip_one)              // _skip:
	self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ    _VAR_ic, IC
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	self.Emit("MOVQ", _VAR_pc, _R9)      // MOVQ    pc, R9
	// self.Byte(0xcc)
	self.Rjmp("JMP", _R9) // JMP     (R9)
}
   610  
// skip_key_value emits the _skip_key_value subroutine: skip an object key,
// the ':' separator (with surrounding whitespace), and the value, then jump
// back through the return address stashed in _VAR_pc. Any native skip error
// routes to _parsing_error_v.
func (self *_Assembler) skip_key_value() {
	self.Link(_LB_skip_key_value) // _skip:
	// skip the key
	self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ    _VAR_ic, IC
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	// match char ':'
	self.lspace("_global_1")
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
	self.Sjmp("JNE", _LB_parsing_error_v) // JNE     _parse_error_v
	self.Emit("ADDQ", jit.Imm(1), _IC)    // ADDQ    $1, IC
	self.lspace("_global_2")
	// skip the value
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	// jump back to specified address
	self.Emit("MOVQ", _VAR_pc, _R9) // MOVQ    pc, R9
	self.Rjmp("JMP", _R9)           // JMP     (R9)
}
   632  
   633  /** Memory Management Routines **/
   634  
// Type descriptor and function address used by the allocation helpers below.
var (
	_T_byte     = jit.Type(byteType)
	_F_mallocgc = jit.Func(mallocgc)
)
   639  
// malloc_AX emits a call to runtime.mallocgc(nb, byteType, false /* no
// pointers */) and stores the returned pointer into ret.
func (self *_Assembler) malloc_AX(nb obj.Addr, ret obj.Addr) {
	self.Emit("MOVQ", nb, _AX)      // MOVQ    ${nb}, AX
	self.Emit("MOVQ", _T_byte, _BX) // MOVQ    ${type(byte)}, BX
	self.Emit("XORL", _CX, _CX)     // XORL    CX, CX
	self.call_go(_F_mallocgc)       // CALL_GO mallocgc
	self.Emit("MOVQ", _AX, ret)     // MOVQ    AX, ${ret}
}
   647  
// valloc emits a call to runtime.mallocgc(vt.Size(), vt, true /* needs
// zeroing/pointer scan */) and stores the new object's pointer into ret.
func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
	self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ    ${vt.Size()}, AX
	self.Emit("MOVQ", jit.Type(vt), _BX)              // MOVQ    ${vt}, BX
	self.Emit("MOVB", jit.Imm(1), _CX)                // MOVB    $1, CX
	self.call_go(_F_mallocgc)                         // CALL_GO mallocgc
	self.Emit("MOVQ", _AX, ret)                       // MOVQ    AX, ${ret}
}
   655  
// valloc_AX is valloc without the final store: the new object's pointer is
// left in AX for the caller's emitted code to consume.
func (self *_Assembler) valloc_AX(vt reflect.Type) {
	self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ    ${vt.Size()}, AX
	self.Emit("MOVQ", jit.Type(vt), _BX)              // MOVQ    ${vt}, BX
	self.Emit("MOVB", jit.Imm(1), _CX)                // MOVB    $1, CX
	self.call_go(_F_mallocgc)                         // CALL_GO mallocgc
}
   662  
// vfollow emits pointer-following code: load *VP; if it is nil, allocate a
// new vt and store it through VP (with a write barrier via WritePtrAX), then
// make VP point at the pointee either way.
func (self *_Assembler) vfollow(vt reflect.Type) {
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)   // MOVQ   (VP), AX
	self.Emit("TESTQ", _AX, _AX)              // TESTQ  AX, AX
	self.Sjmp("JNZ", "_end_{n}")              // JNZ    _end_{n}
	self.valloc_AX(vt)                        // VALLOC ${vt}, AX
	self.WritePtrAX(1, jit.Ptr(_VP, 0), true) // MOVQ   AX, (VP)
	self.Link("_end_{n}")                     // _end_{n}:
	self.Emit("MOVQ", _AX, _VP)               // MOVQ   AX, VP
}
   672  
   673  /** Value Parsing Routines **/
   674  
// Entry addresses of the native value-parsing routines.
var (
	_F_vstring   = jit.Imm(int64(native.S_vstring))
	_F_vnumber   = jit.Imm(int64(native.S_vnumber))
	_F_vsigned   = jit.Imm(int64(native.S_vsigned))
	_F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)
   681  
// check_err emits the post-parse error check: st.Vt < 0 signals a native
// parse error. When vt is non-nil the error is downgraded to a deferred
// type mismatch — the expected type and the value's start position are
// recorded and control transfers to the skip subroutine (_skip_key_value
// when pin2 is a valid instruction index, else _skip_one targeting label
// pin) with the return address stashed in _VAR_pc. With vt == nil the
// error is fatal and jumps straight to _parsing_error_v.
func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _VAR_st_Vt, _AX) // MOVQ st.Vt, AX
	self.Emit("TESTQ", _AX, _AX)       // TESTQ AX, AX
	// try to skip the value
	if vt != nil {
		self.Sjmp("JNS", "_check_err_{n}") // JNS  _check_err_{n}
		self.Emit("MOVQ", jit.Type(vt), _ET)
		self.Emit("MOVQ", _ET, _VAR_et)
		if pin2 != -1 {
			// BX holds the saved pre-parse cursor; back up one byte to
			// re-include the key's opening quote for skip_key_value.
			self.Emit("SUBQ", jit.Imm(1), _BX)
			self.Emit("MOVQ", _BX, _VAR_ic)
			self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
			self.Xref(pin2, 4)
			self.Emit("MOVQ", _R9, _VAR_pc)
			self.Sjmp("JMP", _LB_skip_key_value)
		} else {
			self.Emit("MOVQ", _BX, _VAR_ic)
			self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
			self.Sref(pin, 4)
			self.Emit("MOVQ", _R9, _VAR_pc)
			self.Sjmp("JMP", _LB_skip_one)
		}
		self.Link("_check_err_{n}")
	} else {
		self.Sjmp("JS", _LB_parsing_error_v) // JS   _parsing_error_v
	}
}
   709  
   710  func (self *_Assembler) check_eof(d int64) {
   711  	if d == 1 {
   712  		self.Emit("CMPQ", _IC, _IL)     // CMPQ IC, IL
   713  		self.Sjmp("JAE", _LB_eof_error) // JAE  _eof_error
   714  	} else {
   715  		self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX
   716  		self.Emit("CMPQ", _AX, _IL)             // CMPQ AX, IL
   717  		self.Sjmp("JA", _LB_eof_error)          // JA   _eof_error
   718  	}
   719  }
   720  
// parse_string emits a call to the native vstring parser (decoder flags are
// passed in CX); parse errors go to the generic handler (vt == nil).
func (self *_Assembler) parse_string() {
	self.Emit("MOVQ", _ARG_fv, _CX)
	self.call_vf(_F_vstring)
	self.check_err(nil, "", -1)
}
   726  
// parse_number emits a call to the native vnumber parser. The input cursor
// is saved in BX first so check_err can record the error position and skip
// the value on mismatch.
func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BX) // save ic when call native func
	self.call_vf(_F_vnumber)
	self.check_err(vt, pin, pin2)
}
   732  
// parse_signed emits a call to the native vsigned parser; see parse_number
// for the cursor-save / error-skip protocol.
func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BX) // save ic when call native func
	self.call_vf(_F_vsigned)
	self.check_err(vt, pin, pin2)
}
   738  
// parse_unsigned emits a call to the native vunsigned parser; see
// parse_number for the cursor-save / error-skip protocol.
func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BX) // save ic when call native func
	self.call_vf(_F_vunsigned)
	self.check_err(vt, pin, pin2)
}
   744  
// copy_string is an out-of-line subroutine that copies the string slice
// [DI, DI+SI) into freshly allocated memory so the decoded value does not
// alias the input buffer. Register contract: Pointer: DI, Size: SI,
// Return address: R9 (the caller materializes R9 with LEAQ (PC) and JMPs
// here; this routine JMPs back through R9).
func (self *_Assembler) copy_string() {
	self.Link("_copy_string")
	self.Emit("MOVQ", _DI, _VAR_bs_p)  // spill args across the calls below
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc_AX(_SI, _ARG_sv_p)     // allocate SI bytes, pointer -> sv.p
	self.Emit("MOVQ", _VAR_bs_p, _BX)
	self.Emit("MOVQ", _VAR_bs_n, _CX)
	self.call_go(_F_memmove)           // memmove(sv.p, bs.p, bs.n)
	self.Emit("MOVQ", _ARG_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   760  
// escape_string is an out-of-line subroutine that unquotes a string
// containing escape sequences into freshly allocated memory (native
// unquote). On success DI/SI hold the unescaped pointer/length; a negative
// unquote result jumps to the unquote-error handler (SI is pre-restored to
// the original length + 1 so the error position can be derived).
// Register contract: Pointer: DI, Size: SI, Return address: R9.
func (self *_Assembler) escape_string() {
	self.Link("_escape_string")
	self.Emit("MOVQ", _DI, _VAR_bs_p)  // spill args across the calls below
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc_AX(_SI, _DX) // MALLOC SI, DX
	self.Emit("MOVQ", _DX, _ARG_sv_p)
	self.Emit("MOVQ", _VAR_bs_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("LEAQ", _VAR_sr, _CX) // LEAQ   sr, CX
	self.Emit("XORL", _R8, _R8)     // XORL   R8, R8
	// translate the "disable unicode replacement" decoder flag into the
	// native unquote flag bit (B_UNICODE_REPLACE)
	self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)       // BTQ    ${_F_disable_urc}, fv
	self.Emit("SETCC", _R8)                                  // SETCC  R8
	self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ   ${types.B_UNICODE_REPLACE}, R8
	self.call_c(_F_unquote)                                  // CALL   unquote
	self.Emit("MOVQ", _VAR_bs_n, _SI)                        // MOVQ   ${n}, SI
	self.Emit("ADDQ", jit.Imm(1), _SI)                       // ADDQ   $1, SI
	self.Emit("TESTQ", _AX, _AX)                             // TESTQ  AX, AX
	self.Sjmp("JS", _LB_unquote_error)                       // JS     _unquote_error
	self.Emit("MOVQ", _AX, _SI)                              // unescaped length
	self.Emit("MOVQ", _ARG_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   786  
// escape_string_twice is the double-unquote variant of escape_string, used
// for string-encoded strings (the `,string` tag case): the native unquote is
// invoked with F_DOUBLE_UNQUOTE so both quoting layers are removed in one
// pass. Register contract: Pointer: DI, Size: SI, Return address: R9.
func (self *_Assembler) escape_string_twice() {
	self.Link("_escape_string_twice")
	self.Emit("MOVQ", _DI, _VAR_bs_p)  // spill args across the calls below
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc_AX(_SI, _DX) // MALLOC SI, DX
	self.Emit("MOVQ", _DX, _ARG_sv_p)
	self.Emit("MOVQ", _VAR_bs_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("LEAQ", _VAR_sr, _CX)                         // LEAQ   sr, CX
	self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL   ${types.F_DOUBLE_UNQUOTE}, R8
	// OR in the unicode-replacement flag bit, derived from the decoder flags
	self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)       // BTQ    ${_F_disable_urc}, fv
	self.Emit("XORL", _AX, _AX)                              // XORL   AX, AX
	self.Emit("SETCC", _AX)                                  // SETCC  AX
	self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ   ${types.B_UNICODE_REPLACE}, AX
	self.Emit("ORQ", _AX, _R8)                               // ORQ    AX, R8
	self.call_c(_F_unquote)                                  // CALL   unquote
	self.Emit("MOVQ", _VAR_bs_n, _SI)                        // MOVQ   ${n}, SI
	self.Emit("ADDQ", jit.Imm(3), _SI)                       // ADDQ   $3, SI
	self.Emit("TESTQ", _AX, _AX)                             // TESTQ  AX, AX
	self.Sjmp("JS", _LB_unquote_error)                       // JS     _unquote_error
	self.Emit("MOVQ", _AX, _SI)                              // unescaped length
	self.Emit("MOVQ", _ARG_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   813  
   814  /** Range Checking Routines **/
   815  
// Boxed float32 bounds for the float32 range check. jit.Imm needs a stable
// runtime address to embed in the generated code, so the values live behind
// heap pointers that are filled in by init.
var (
	_V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
	_V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

var (
	_Vp_max_f32 = new(float32)
	_Vp_min_f32 = new(float32)
)

func init() {
	*_Vp_max_f32 = math.MaxFloat32
	*_Vp_min_f32 = -math.MaxFloat32
}
   830  
// range_single_X0 emits a float64 -> float32 narrowing (result in X0) with a
// range check: values outside [-MaxFloat32, MaxFloat32] jump to the range
// error handler with ET/EP preloaded to describe float32.
func (self *_Assembler) range_single_X0() {
	self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)        // CVTSD2SS _VAR_st_Dv, X0
	self.Emit("MOVQ", _V_max_f32, _CX)            // MOVQ     _max_f32, CX
	self.Emit("MOVQ", jit.Gitab(_I_float32), _ET) // MOVQ     ${itab(float32)}, ET
	self.Emit("MOVQ", jit.Gtype(_T_float32), _EP) // MOVQ     ${type(float32)}, EP
	self.Emit("UCOMISS", jit.Ptr(_CX, 0), _X0)    // UCOMISS  (CX), X0
	self.Sjmp("JA", _LB_range_error)              // JA       _range_error
	self.Emit("MOVQ", _V_min_f32, _CX)            // MOVQ     _min_f32, CX
	self.Emit("UCOMISS", jit.Ptr(_CX, 0), _X0)    // UCOMISS  (CX), X0
	self.Sjmp("JB", _LB_range_error)              // JB       _range_error
}
   842  
// range_signed_CX emits a signed range check: st.Iv is loaded into CX and
// values outside [a, b] jump to the range error handler with ET/EP preloaded
// with the itab/type describing the target integer type.
func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
	self.Emit("MOVQ", _VAR_st_Iv, _CX)   // MOVQ st.Iv, CX
	self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET
	self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP
	self.Emit("CMPQ", _CX, jit.Imm(a))   // CMPQ CX, ${a}
	self.Sjmp("JL", _LB_range_error)     // JL   _range_error
	self.Emit("CMPQ", _CX, jit.Imm(b))   // CMPQ CX, ${b}
	self.Sjmp("JG", _LB_range_error)     // JG   _range_error
}
   852  
// range_unsigned_CX emits an unsigned range check: st.Iv is loaded into CX
// and negative values or values above v jump to the range error handler with
// ET/EP preloaded to describe the target unsigned type.
func (self *_Assembler) range_unsigned_CX(i *rt.GoItab, t *rt.GoType, v uint64) {
	self.Emit("MOVQ", _VAR_st_Iv, _CX)        // MOVQ  st.Iv, CX
	self.Emit("MOVQ", jit.Gitab(i), _ET)      // MOVQ  ${i}, ET
	self.Emit("MOVQ", jit.Gtype(t), _EP)      // MOVQ  ${t}, EP
	self.Emit("TESTQ", _CX, _CX)              // TESTQ CX, CX
	self.Sjmp("JS", _LB_range_error)          // JS    _range_error
	self.Emit("CMPQ", _CX, jit.Imm(int64(v))) // CMPQ  CX, ${v}
	self.Sjmp("JA", _LB_range_error)          // JA    _range_error
}
   862  
   863  /** String Manipulating Routines **/
   864  
// Entry address of the native unquote subroutine (C ABI, called via call_c).
var (
	_F_unquote = jit.Imm(int64(native.S_unquote))
)
   868  
// slice_from loads the start offset from the memory operand p and delegates
// to slice_from_r to form the DI/SI (pointer/length) slice ending at IC + d.
func (self *_Assembler) slice_from(p obj.Addr, d int64) {
	self.Emit("MOVQ", p, _SI) // MOVQ    ${p}, SI
	self.slice_from_r(_SI, d) // SLICE_R SI, ${d}
}
   873  
// slice_from_r emits code that forms a slice of the input buffer from the
// start offset in register p up to IC + d: DI = IP + p (pointer) and
// SI = IC - p + d (length). Note p is clobbered (negated) in the process.
func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
	self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI
	self.Emit("NEGQ", p)                          // NEGQ ${p}
	self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI
}
   879  
// unquote_once stores the string just parsed by vstring into (p, n).
// If the string contains escapes (st.Ep != -1) it is routed through the
// _escape_string subroutine; otherwise, when copy is set and the
// copy-string decoder flag is on, it is routed through _copy_string so the
// result does not alias the input. stack selects a plain store (value lives
// on the stack) versus a write-barriered store.
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
	self.slice_from(_VAR_st_Iv, -1)            // SLICE  st.Iv, $-1
	self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1)) // CMPQ   st.Ep, $-1
	self.Sjmp("JE", "_noescape_{n}")           // JE     _noescape_{n}
	self.Byte(0x4c, 0x8d, 0x0d)                // LEAQ (PC), R9
	self.Sref("_unquote_once_write_{n}", 4)
	self.Sjmp("JMP", "_escape_string")
	self.Link("_noescape_{n}")
	if copy {
		self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
		self.Sjmp("JNC", "_unquote_once_write_{n}")
		self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
		self.Sref("_unquote_once_write_{n}", 4)
		self.Sjmp("JMP", "_copy_string")
	}
	self.Link("_unquote_once_write_{n}")
	self.Emit("MOVQ", _SI, n) // MOVQ   SI, ${n}
	if stack {
		self.Emit("MOVQ", _DI, p)
	} else {
		self.WriteRecNotAX(10, _DI, p, false, false)
	}
}
   903  
// unquote_twice stores a doubly-quoted string value (the `,string` tag form,
// e.g. "\"...\"") into (p, n). It first validates the trailing `\"` pair,
// slices off both quote layers, then routes through _escape_string_twice if
// inner escapes exist, or _copy_string when the copy-string flag is set.
// stack selects a plain store versus a write-barriered store.
func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
	self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))                 // CMPQ   st.Ep, $-1
	self.Sjmp("JE", _LB_eof_error)                             // JE     _eof_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB   -3(IP)(IC), $'\\'
	self.Sjmp("JNE", _LB_char_m3_error)                        // JNE    _char_m3_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))  // CMPB   -2(IP)(IC), $'"'
	self.Sjmp("JNE", _LB_char_m2_error)                        // JNE    _char_m2_error
	self.slice_from(_VAR_st_Iv, -3)                            // SLICE  st.Iv, $-3
	// only the leading inner escape counts as "no escapes inside":
	// st.Ep == start + len means the first escape is the closing \" itself
	self.Emit("MOVQ", _SI, _AX)        // MOVQ   SI, AX
	self.Emit("ADDQ", _VAR_st_Iv, _AX) // ADDQ   st.Iv, AX
	self.Emit("CMPQ", _VAR_st_Ep, _AX) // CMPQ   st.Ep, AX
	self.Sjmp("JE", "_noescape_{n}")   // JE     _noescape_{n}
	self.Byte(0x4c, 0x8d, 0x0d)        // LEAQ (PC), R9
	self.Sref("_unquote_twice_write_{n}", 4)
	self.Sjmp("JMP", "_escape_string_twice")
	self.Link("_noescape_{n}") // _noescape_{n}:
	self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
	self.Sjmp("JNC", "_unquote_twice_write_{n}")
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_unquote_twice_write_{n}", 4)
	self.Sjmp("JMP", "_copy_string")
	self.Link("_unquote_twice_write_{n}")
	self.Emit("MOVQ", _SI, n) // MOVQ   SI, ${n}
	if stack {
		self.Emit("MOVQ", _DI, p)
	} else {
		self.WriteRecNotAX(12, _DI, p, false, false)
	}
	self.Link("_unquote_twice_end_{n}")
}
   934  
   935  /** Memory Clearing Routines **/
   936  
// Runtime memory-clearing helpers, linked in from the Go runtime elsewhere
// in this package.
var (
	_F_memclrHasPointers    = jit.Func(memclrHasPointers)
	_F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)
   941  
   942  func (self *_Assembler) mem_clear_fn(ptrfree bool) {
   943  	if !ptrfree {
   944  		self.call_go(_F_memclrHasPointers)
   945  	} else {
   946  		self.call_go(_F_memclrNoHeapPointers)
   947  	}
   948  }
   949  
// mem_clear_rem emits code that zeroes the remainder of the current
// container: from VP up to the saved end pointer on top of the decoder
// stack, plus size extra bytes. ptrfree selects the appropriate runtime
// memclr helper (see mem_clear_fn).
func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
	self.Emit("MOVQ", jit.Imm(size), _BX)           // MOVQ    ${size}, BX
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ    (ST), AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ    (ST)(AX), AX
	self.Emit("SUBQ", _VP, _AX)                     // SUBQ    VP, AX
	self.Emit("ADDQ", _AX, _BX)                     // ADDQ    AX, BX
	self.Emit("MOVQ", _VP, _AX)                     // MOVQ    VP, AX
	self.mem_clear_fn(ptrfree)                      // CALL_GO memclr{Has,NoHeap}Pointers
}
   959  
   960  /** Map Assigning Routines **/
   961  
// Runtime map-assignment helpers (linked from the Go runtime elsewhere in
// this package).
var (
	_F_mapassign           = jit.Func(mapassign)
	_F_mapassign_fast32    = jit.Func(mapassign_fast32)
	_F_mapassign_faststr   = jit.Func(mapassign_faststr)
	_F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

// Unmarshaler trampolines; resolved in init to avoid package-initialization
// ordering issues.
var (
	_F_decodeJsonUnmarshaler obj.Addr
	_F_decodeTextUnmarshaler obj.Addr
)

func init() {
	_F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
	_F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
   978  
// mapaccess_ptr follows the element pointer after a map assignment when the
// map stores its elements indirectly (element slot holds a pointer).
func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
	if rt.MapType(rt.UnpackType(t)).IndirectElem() {
		self.vfollow(t.Elem())
	}
}
   984  
// mapassign_std emits a generic runtime.mapassign call with the key taken by
// address from the memory operand v; VP is left pointing at the value slot.
func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
	self.Emit("LEAQ", v, _AX)                    // LEAQ      ${v}, AX
	self.mapassign_call_from_AX(t, _F_mapassign) // MAPASSIGN ${t}, mapassign
}
   989  
// mapassign_str_fast emits a runtime.mapassign_faststr call for string-keyed
// maps: the key is passed as (p, n) pointer/length, and VP is updated to the
// returned value slot (followed if the map stores elements indirectly).
func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
	self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ    ${t}, AX
	self.Emit("MOVQ", _VP, _BX)         // MOVQ    VP, BX
	self.Emit("MOVQ", p, _CX)           // MOVQ    ${p}, CX
	self.Emit("MOVQ", n, _DI)           // MOVQ    ${n}, DI
	self.call_go(_F_mapassign_faststr)  // CALL_GO mapassign_faststr
	self.Emit("MOVQ", _AX, _VP)         // MOVQ    AX, VP
	self.mapaccess_ptr(t)
}
   999  
// mapassign_call_from_AX emits a call to the given runtime map-assign helper
// with the key (already in AX) moved to CX, the map type in AX and the map
// header (VP) in BX; VP is updated to the returned value slot.
func (self *_Assembler) mapassign_call_from_AX(t reflect.Type, fn obj.Addr) {
	self.Emit("MOVQ", _AX, _CX)         // key pointer/value -> CX
	self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ    ${t}, AX
	self.Emit("MOVQ", _VP, _BX)         // MOVQ    VP, BX
	self.call_go(fn)                    // CALL_GO ${fn}
	self.Emit("MOVQ", _AX, _VP)         // MOVQ    AX, VP
}
  1007  
// mapassign_fastx emits a fast-path map assignment (key already in AX) and
// then follows the element pointer if the map stores elements indirectly.
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
	self.mapassign_call_from_AX(t, fn)
	self.mapaccess_ptr(t)
}
  1012  
// mapassign_utext handles map keys whose type implements
// encoding.TextUnmarshaler: it allocates a key value, invokes the
// unmarshaler on the parsed string (sv.p, sv.n), then assigns the key into
// the map. addressable indicates the key type has a pointer-receiver
// unmarshaler on an addressable value, in which case the unmarshaler is
// invoked through *K but the key is assigned by value.
func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
	pv := false
	vk := t.Key()
	tk := t.Key()

	/* deref pointer if needed */
	if vk.Kind() == reflect.Ptr {
		pv = true
		vk = vk.Elem()
	}

	/* addressable value with pointer receiver */
	if addressable {
		pv = false
		tk = reflect.PtrTo(tk)
	}

	/* allocate the key, and call the unmarshaler */
	self.valloc(vk, _BX) // VALLOC  ${vk}, BX
	// must spill vk pointer since next call_go may invoke GC
	self.Emit("MOVQ", _BX, _ARG_vk)
	self.Emit("MOVQ", jit.Type(tk), _AX)   // MOVQ    ${tk}, AX
	self.Emit("MOVQ", _ARG_sv_p, _CX)      // MOVQ    sv.p, CX
	self.Emit("MOVQ", _ARG_sv_n, _DI)      // MOVQ    sv.n, DI
	self.call_go(_F_decodeTextUnmarshaler) // CALL_GO decodeTextUnmarshaler
	self.Emit("TESTQ", _ET, _ET)           // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)            // JNZ     _error
	self.Emit("MOVQ", _ARG_vk, _AX)        // MOVQ    VAR.vk, AX
	self.Emit("MOVQ", jit.Imm(0), _ARG_vk) // clear the spill slot for the GC

	/* select the correct assignment function */
	if !pv {
		self.mapassign_call_from_AX(t, _F_mapassign)
	} else {
		self.mapassign_fastx(t, _F_mapassign_fast64ptr)
	}
}
  1050  
  1051  /** External Unmarshaler Routines **/
  1052  
// Entry addresses of the native value-skipping subroutines (called via
// call_sf), used to step over JSON values without decoding them.
var (
	_F_skip_one    = jit.Imm(int64(native.S_skip_one))
	_F_skip_array  = jit.Imm(int64(native.S_skip_array))
	_F_skip_object = jit.Imm(int64(native.S_skip_object))
	_F_skip_number = jit.Imm(int64(native.S_skip_number))
)
  1059  
// unmarshal_json emits code that skips one raw JSON value, captures it as
// the (sv.p, sv.n) slice of the input, and hands it to the value's
// json.Unmarshaler via the trampoline. deref controls pointer allocation in
// unmarshal_func.
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
	self.call_sf(_F_skip_one)                               // CALL_SF   skip_one
	self.Emit("TESTQ", _AX, _AX)                            // TESTQ     AX, AX
	self.Sjmp("JS", _LB_parsing_error_v)                    // JS        _parsing_error_v
	self.slice_from_r(_AX, 0)                               // SLICE_R   AX, $0
	self.Emit("MOVQ", _DI, _ARG_sv_p)                       // MOVQ      DI, sv.p
	self.Emit("MOVQ", _SI, _ARG_sv_n)                       // MOVQ      SI, sv.n
	self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
}
  1069  
// unmarshal_text emits code that parses a JSON string, unquotes it into
// (sv.p, sv.n), and hands it to the value's encoding.TextUnmarshaler via
// the trampoline. deref controls pointer allocation in unmarshal_func.
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
	self.parse_string()                                     // PARSE     STRING
	self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
	self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
}
  1075  
// unmarshal_func emits the common tail of the unmarshaler paths: ensure the
// receiver pointer exists (allocating the pointee when deref is set and the
// field is a nil pointer), then call the given trampoline with the receiver
// type in AX, receiver pointer in BX and the source bytes in CX/DI. A
// non-nil error (ET) aborts to the generic error handler.
func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
	pt := t
	vk := t.Kind()

	/* allocate the field if needed */
	if deref && vk == reflect.Ptr {
		self.Emit("MOVQ", _VP, _BX)                               // MOVQ   VP, BX
		self.Emit("MOVQ", jit.Ptr(_BX, 0), _BX)                   // MOVQ   (BX), BX
		self.Emit("TESTQ", _BX, _BX)                              // TESTQ  BX, BX
		self.Sjmp("JNZ", "_deref_{n}")                            // JNZ    _deref_{n}
		self.valloc(t.Elem(), _BX)                                // VALLOC ${t.Elem()}, BX
		self.WriteRecNotAX(3, _BX, jit.Ptr(_VP, 0), false, false) // MOVQ   BX, (VP)
		self.Link("_deref_{n}")                                   // _deref_{n}:
	} else {
		/* set value pointer */
		self.Emit("MOVQ", _VP, _BX) // MOVQ   VP, BX
	}

	/* set value type */
	self.Emit("MOVQ", jit.Type(pt), _AX) // MOVQ ${pt}, AX

	/* set the source string and call the unmarshaler */
	self.Emit("MOVQ", _ARG_sv_p, _CX) // MOVQ    sv.p, CX
	self.Emit("MOVQ", _ARG_sv_n, _DI) // MOVQ    sv.n, DI
	self.call_go(fn)                  // CALL_GO ${fn}
	self.Emit("TESTQ", _ET, _ET)      // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)       // JNZ     _error
}
  1104  
  1105  /** Dynamic Decoding Routine **/
  1106  
// Trampoline for recursive decoding through a runtime type; resolved in init
// to avoid package-initialization ordering issues.
var (
	_F_decodeTypedPointer obj.Addr
)

func init() {
	_F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}
  1114  
// decode_dynamic emits a recursive call into decodeTypedPointer for a value
// whose concrete type (vt) and pointer (vp) are only known at runtime. The
// runtime-reserved registers are saved/restored around the call. On return,
// AX is the new cursor and BX/CX an error pair; a MismatchTypeError is
// recorded (VAR_ic/VAR_et) and tolerated, any other error aborts.
func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
	self.Emit("MOVQ", vp, _SI)      // MOVQ    ${vp}, SI
	self.Emit("MOVQ", vt, _DI)      // MOVQ    ${vt}, DI
	self.Emit("MOVQ", _ARG_sp, _AX) // MOVQ    sp, AX
	self.Emit("MOVQ", _ARG_sl, _BX) // MOVQ    sl, BX
	self.Emit("MOVQ", _IC, _CX)     // MOVQ    IC, CX
	self.Emit("MOVQ", _ST, _R8)     // MOVQ    ST, R8
	self.Emit("MOVQ", _ARG_fv, _R9) // MOVQ    fv, R9
	self.save(_REG_rt...)
	self.Emit("MOVQ", _F_decodeTypedPointer, _IL) // MOVQ ${fn}, IL (R11)
	self.Rjmp("CALL", _IL)                        // CALL IL
	self.load(_REG_rt...)
	self.Emit("MOVQ", _AX, _IC)                       // MOVQ    AX, IC
	self.Emit("MOVQ", _BX, _ET)                       // MOVQ    BX, ET
	self.Emit("MOVQ", _CX, _EP)                       // MOVQ    CX, EP
	self.Emit("TESTQ", _ET, _ET)                      // TESTQ   ET, ET
	self.Sjmp("JE", "_decode_dynamic_end_{n}")        // JE      _decode_dynamic_end_{n}
	self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
	self.Emit("CMPQ", _ET, _CX)                       // CMPQ ET, CX
	self.Sjmp("JNE", _LB_error)                       // JNE  _error
	self.Emit("MOVQ", _EP, _VAR_ic)                   // MOVQ EP, VAR_ic
	self.Emit("MOVQ", _ET, _VAR_et)                   // MOVQ ET, VAR_et
	self.Link("_decode_dynamic_end_{n}")
}
  1139  
  1140  /** OpCode Assembler Functions **/
  1141  
// Runtime helpers used by the opcode implementations below (linked from the
// Go runtime elsewhere in this package).
var (
	_F_memequal         = jit.Func(memequal)
	_F_memmove          = jit.Func(memmove)
	_F_growslice        = jit.Func(growslice)
	_F_makeslice        = jit.Func(makeslice)
	_F_makemap_small    = jit.Func(makemap_small)
	_F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

// Native subroutines: whitespace skipping and string hashing.
var (
	_F_lspace  = jit.Imm(int64(native.S_lspace))
	_F_strhash = jit.Imm(int64(caching.S_strhash))
)

// Out-of-line subroutine addresses generated elsewhere in this package.
var (
	_F_b64decode   = jit.Imm(int64(_subr__b64decode))
	_F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

var (
	_F_FieldMap_GetCaseInsensitive obj.Addr
	// _Zero_Base is the address of an empty slice's backing store, used as a
	// non-nil base pointer for zero-length results.
	_Empty_Slice = []byte{}
	_Zero_Base   = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
	_MODE_AVX2 = 1 << 2
)

// Field offsets into caching.FieldEntry, for generated field-lookup code.
const (
	_Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
	_Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
	_Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

// Constants for runtime kind checks on rt.GoType.
const (
	_Vk_Ptr       = int64(reflect.Ptr)
	_Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)

func init() {
	_F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
  1185  
// _asm_OP_any decodes into an interface{} slot at VP. If the slot already
// holds a non-nil pointer value (and is not self-referential), decoding
// recurses into the pointee via decode_dynamic; otherwise the generic
// decodeValue subroutine is invoked, temporarily spilling ST to the stack so
// the GC can see it.
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 8), _CX)                // MOVQ    8(VP), CX
	self.Emit("TESTQ", _CX, _CX)                           // TESTQ   CX, CX
	self.Sjmp("JZ", "_decode_{n}")                         // JZ      _decode_{n}
	self.Emit("CMPQ", _CX, _VP)                            // CMPQ    CX, VP
	self.Sjmp("JE", "_decode_{n}")                         // JE      _decode_{n}
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)                // MOVQ    (VP), AX
	self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
	self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)        // ANDL    ${F_kind_mask}, DX
	self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))               // CMPL    DX, ${reflect.Ptr}
	self.Sjmp("JNE", "_decode_{n}")                        // JNE     _decode_{n}
	self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                // LEAQ    8(VP), DI
	self.decode_dynamic(_AX, _DI)                          // DECODE  AX, DI
	self.Sjmp("JMP", "_decode_end_{n}")                    // JMP     _decode_end_{n}
	self.Link("_decode_{n}")                               // _decode_{n}:
	self.Emit("MOVQ", _ARG_fv, _DF)                        // MOVQ    fv, DF
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 0))                // MOVQ    ST, (SP)
	self.call(_F_decodeValue)                              // CALL    decodeValue
	self.Emit("MOVQ", jit.Imm(0), jit.Ptr(_SP, 0))         // MOVQ    $0, (SP)
	self.Emit("TESTQ", _EP, _EP)                           // TESTQ   EP, EP
	self.Sjmp("JNZ", _LB_parsing_error)                    // JNZ     _parsing_error
	self.Link("_decode_end_{n}")                           // _decode_end_{n}:
}
  1209  
// _asm_OP_dyn decodes through a non-empty interface at VP: the stored value
// must be non-nil and its dynamic type must be a pointer kind, otherwise a
// type error is raised; decoding then recurses into the pointee.
func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
	self.Emit("MOVQ", jit.Type(p.vt()), _ET)               // MOVQ    ${p.vt()}, ET
	self.Emit("CMPQ", jit.Ptr(_VP, 8), jit.Imm(0))         // CMPQ    8(VP), $0
	self.Sjmp("JE", _LB_type_error)                        // JE      _type_error
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _CX)                // MOVQ    (VP), CX
	self.Emit("MOVQ", jit.Ptr(_CX, 8), _CX)                // MOVQ    8(CX), CX
	self.Emit("MOVBLZX", jit.Ptr(_CX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(CX), DX
	self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)        // ANDL    ${F_kind_mask}, DX
	self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))               // CMPL    DX, ${reflect.Ptr}
	self.Sjmp("JNE", _LB_type_error)                       // JNE     _type_error
	self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                // LEAQ    8(VP), DI
	self.decode_dynamic(_CX, _DI)                          // DECODE  CX, DI
	self.Link("_decode_end_{n}")                           // _decode_end_{n}:
}
  1224  
// _asm_OP_str decodes a JSON string into the Go string header at VP
// ((VP) = data pointer, 8(VP) = length).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
	self.parse_string()                                              // PARSE   STRING
	self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP)
}
  1229  
// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at VP.
// It temporarily stores the raw (encoded) slice into the []byte header,
// allocates a buffer sized 3 * ceil(n/4)-ish (n/4*3 of the encoded length),
// swaps it into the header, then calls the native b64decode with the raw
// bytes as source. The final decoded length is written to 8(VP).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
	self.parse_string()                             // PARSE  STRING
	self.slice_from(_VAR_st_Iv, -1)                 // SLICE  st.Iv, $-1
	self.Emit("MOVQ", _DI, jit.Ptr(_VP, 0))         // MOVQ   DI, (VP)
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))         // MOVQ   SI, 8(VP)
	self.Emit("SHRQ", jit.Imm(2), _SI)              // SHRQ   $2, SI
	self.Emit("LEAQ", jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ   (SI)(SI*2), SI
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 16))        // MOVQ   SI, 16(VP)
	self.malloc_AX(_SI, _SI)                        // MALLOC SI, SI

	// TODO: due to base64x's bug, only use AVX mode now
	self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) //  MOVL $_MODE_JSON, CX

	/* call the decoder */
	self.Emit("XORL", _DX, _DX) // XORL  DX, DX
	self.Emit("MOVQ", _VP, _DI) // MOVQ  VP, DI

	// swap the freshly allocated buffer into (VP), keeping the old (raw
	// input) pointer in SI as the decode source
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _R8)                  // MOVQ  (VP), R8
	self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // MOVQ  SI, (VP) (barriered)
	self.Emit("MOVQ", _R8, _SI)

	self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP)
	self.call_c(_F_b64decode)                // CALL  b64decode
	self.Emit("TESTQ", _AX, _AX)             // TESTQ AX, AX
	self.Sjmp("JS", _LB_base64_error)        // JS    _base64_error
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))  // MOVQ  AX, 8(VP)
}
  1257  
// _asm_OP_bool decodes the literals "true"/"false" into the bool at VP by
// comparing 4-byte chunks of input against the packed constants. Any other
// token is recorded as a bool mismatch and skipped so decoding continues.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                  // LEAQ 4(IC), AX
	self.Emit("CMPQ", _AX, _IL)                              // CMPQ AX, IL
	self.Sjmp("JA", _LB_eof_error)                           // JA   _eof_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f'
	self.Sjmp("JE", "_false_{n}")                            // JE   _false_{n}
	self.Emit("MOVL", jit.Imm(_IM_true), _CX)                // MOVL $"true", CX
	self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))          // CMPL CX, (IP)(IC)
	self.Sjmp("JE", "_bool_true_{n}")
	// try to skip the value: record the mismatch (type, position, resume pc)
	// and jump to the skip-one handler
	self.Emit("MOVQ", _IC, _VAR_ic)
	self.Emit("MOVQ", _T_bool, _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_end_{n}", 4)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one)

	self.Link("_bool_true_{n}")
	self.Emit("MOVQ", _AX, _IC)                     // MOVQ AX, IC
	self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))  // MOVB $1, (VP)
	self.Sjmp("JMP", "_end_{n}")                    // JMP  _end_{n}
	self.Link("_false_{n}")                         // _false_{n}:
	self.Emit("ADDQ", jit.Imm(1), _AX)              // ADDQ $1, AX
	self.Emit("ADDQ", jit.Imm(1), _IC)              // ADDQ $1, IC
	self.Emit("CMPQ", _AX, _IL)                     // CMPQ AX, IL
	self.Sjmp("JA", _LB_eof_error)                  // JA   _eof_error
	self.Emit("MOVL", jit.Imm(_IM_alse), _CX)       // MOVL $"alse", CX
	self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
	self.Sjmp("JNE", _LB_im_error)                  // JNE  _im_error
	self.Emit("MOVQ", _AX, _IC)                     // MOVQ AX, IC
	self.Emit("XORL", _AX, _AX)                     // XORL AX, AX
	self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))         // MOVB AX, (VP)
	self.Link("_end_{n}")                           // _end_{n}:
}
  1293  
// _asm_OP_num decodes a json.Number at VP: it validates the number with the
// native skip_number routine (also accepting a string-quoted number, tracked
// via the VAR_fl flag), then stores the raw bytes as the string value,
// optionally copying them when the copy-string flag is set. Invalid numbers
// are recorded as a mismatch and skipped.
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
	self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
	self.Emit("MOVQ", _IC, _BX)
	self.Sjmp("JNE", "_skip_number_{n}")
	self.Emit("MOVQ", jit.Imm(1), _VAR_fl) // remember the opening quote
	self.Emit("ADDQ", jit.Imm(1), _IC)
	self.Link("_skip_number_{n}")

	/* call skip_number */
	self.Emit("LEAQ", _ARG_s, _DI)  // LEAQ  s<>+0(FP), DI
	self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ  IC, ic<>+16(FP)
	self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ  ic<>+16(FP), SI
	self.callc(_F_skip_number)      // CALL  _F_skip_number
	self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ  ic<>+16(FP), IC
	self.Emit("TESTQ", _AX, _AX)    // TESTQ AX, AX
	self.Sjmp("JNS", "_num_next_{n}")

	/* call skip one */
	self.Emit("MOVQ", _BX, _VAR_ic)
	self.Emit("MOVQ", _T_number, _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_num_end_{n}", 4)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one)

	/* assign string */
	self.Link("_num_next_{n}")
	self.slice_from_r(_AX, 0)
	self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
	self.Sjmp("JNC", "_num_write_{n}")
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_num_write_{n}", 4)
	self.Sjmp("JMP", "_copy_string")
	self.Link("_num_write_{n}")
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ  SI, 8(VP)
	self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
	// if the number was quoted, require and consume the closing quote
	self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
	self.Sjmp("JNE", "_num_end_{n}")
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
	self.Sjmp("JNE", _LB_char_0_error)
	self.Emit("ADDQ", jit.Imm(1), _IC)
	self.Link("_num_end_{n}")
}
  1339  
// _asm_OP_i8 decodes a signed integer, range-checks it against int8 bounds,
// and stores the low byte at VP; mismatches skip to the pin label.
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
	var pin = "_i8_end_{n}"
	self.parse_signed(int8Type, pin, -1)                               // PARSE int8
	self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
	self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                            // MOVB  CX, (VP)
	self.Link(pin)
}
  1347  
// _asm_OP_i16 decodes a signed integer, range-checks it against int16
// bounds, and stores the low word at VP; mismatches skip to the pin label.
func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
	var pin = "_i16_end_{n}"
	self.parse_signed(int16Type, pin, -1)                                  // PARSE int16
	self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
	self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                                // MOVW  CX, (VP)
	self.Link(pin)
}
  1355  
  1356  func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
  1357  	var pin = "_i32_end_{n}"
  1358  	self.parse_signed(int32Type, pin, -1)                                  // PARSE int32
  1359  	self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
  1360  	self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                                // MOVL  CX, (VP)
  1361  	self.Link(pin)
  1362  }
  1363  
  1364  func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
  1365  	var pin = "_i64_end_{n}"
  1366  	self.parse_signed(int64Type, pin, -1)   // PARSE int64
  1367  	self.Emit("MOVQ", _VAR_st_Iv, _AX)      // MOVQ  st.Iv, AX
  1368  	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ  AX, (VP)
  1369  	self.Link(pin)
  1370  }
  1371  
  1372  func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
  1373  	var pin = "_u8_end_{n}"
  1374  	self.parse_unsigned(uint8Type, pin, -1)                   // PARSE uint8
  1375  	self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
  1376  	self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                   // MOVB  CX, (VP)
  1377  	self.Link(pin)
  1378  }
  1379  
  1380  func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
  1381  	var pin = "_u16_end_{n}"
  1382  	self.parse_unsigned(uint16Type, pin, -1)                     // PARSE uint16
  1383  	self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
  1384  	self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                      // MOVW  CX, (VP)
  1385  	self.Link(pin)
  1386  }
  1387  
  1388  func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
  1389  	var pin = "_u32_end_{n}"
  1390  	self.parse_unsigned(uint32Type, pin, -1)                     // PARSE uint32
  1391  	self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
  1392  	self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                      // MOVL  CX, (VP)
  1393  	self.Link(pin)
  1394  }
  1395  
  1396  func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
  1397  	var pin = "_u64_end_{n}"
  1398  	self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64
  1399  	self.Emit("MOVQ", _VAR_st_Iv, _AX)       // MOVQ  st.Iv, AX
  1400  	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))  // MOVQ  AX, (VP)
  1401  	self.Link(pin)
  1402  }
  1403  
  1404  func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
  1405  	var pin = "_f32_end_{n}"
  1406  	self.parse_number(float32Type, pin, -1)  // PARSE NUMBER
  1407  	self.range_single_X0()                   // RANGE float32
  1408  	self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP)
  1409  	self.Link(pin)
  1410  }
  1411  
  1412  func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
  1413  	var pin = "_f64_end_{n}"
  1414  	self.parse_number(float64Type, pin, -1)  // PARSE NUMBER
  1415  	self.Emit("MOVSD", _VAR_st_Dv, _X0)      // MOVSD st.Dv, X0
  1416  	self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP)
  1417  	self.Link(pin)
  1418  }
  1419  
// _asm_OP_unquote decodes a doubly-quoted string: the input must start with
// the two characters `\"`, after which the string is parsed and unquoted
// twice, writing the resulting (ptr, len) header to (VP) and 8(VP).
func (self *_Assembler) _asm_OP_unquote(_ *_Instr) {
	self.check_eof(2)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB    (IP)(IC), $'\\'
	self.Sjmp("JNE", _LB_char_0_error)                          // JNE     _char_0_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB    1(IP)(IC), $'"'
	self.Sjmp("JNE", _LB_char_1_error)                          // JNE     _char_1_error
	self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ    $2, IC — skip the `\"` prefix
	self.parse_string()                                         // PARSE   STRING
	self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
}
  1430  
// _asm_OP_nil_1 zeroes one pointer-sized word at (VP).
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
	self.Emit("XORL", _AX, _AX)             // XORL AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
}
  1435  
// _asm_OP_nil_2 zeroes two pointer-sized words (16 bytes) at (VP).
func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
	self.Emit("PXOR", _X0, _X0)              // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
}
  1440  
// _asm_OP_nil_3 zeroes three pointer-sized words (24 bytes, e.g. a slice
// header) at (VP): a 16-byte SSE store followed by one 8-byte store.
func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
	self.Emit("XORL", _AX, _AX)              // XORL  AX, AX
	self.Emit("PXOR", _X0, _X0)              // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 16)) // MOVQ  AX, 16(VP)
}
  1447  
// _asm_OP_deref follows the pointer stored at (VP) via vfollow for the
// instruction's value type (vfollow presumably allocates when the pointer is
// nil — see its definition).
func (self *_Assembler) _asm_OP_deref(p *_Instr) {
	self.vfollow(p.vt())
}
  1451  
// _asm_OP_index advances the value pointer VP by the constant byte offset
// carried in the instruction (e.g. a struct field or array element offset).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
	self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX
	self.Emit("ADDQ", _AX, _VP)              // ADDQ _AX, _VP
}
  1456  
// _asm_OP_is_null branches to the instruction's target when the next four
// input bytes are the literal "null" (compared as one 32-bit word), consuming
// them; otherwise it falls through with IC unchanged.
func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                       // LEAQ    4(IC), AX
	self.Emit("CMPQ", _AX, _IL)                                   // CMPQ    AX, IL — need 4 bytes of input
	self.Sjmp("JA", "_not_null_{n}")                              // JA      _not_null_{n}
	self.Emit("CMPL", jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL    (IP)(IC), $"null"
	self.Emit("CMOVQEQ", _AX, _IC)                                // CMOVQEQ AX, IC — consume "null" only on match
	self.Xjmp("JE", p.vi())                                       // JE      {p.vi()}
	self.Link("_not_null_{n}")                                    // _not_null_{n}:
}
  1466  
// _asm_OP_is_null_quote branches to the instruction's target when the next
// five input bytes are `null"` (a null inside a quoted value), consuming
// them; otherwise it falls through with IC unchanged.
func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 5), _AX)                       // LEAQ    5(IC), AX
	self.Emit("CMPQ", _AX, _IL)                                   // CMPQ    AX, IL — need 5 bytes of input
	self.Sjmp("JA", "_not_null_quote_{n}")                        // JA      _not_null_quote_{n}
	self.Emit("CMPL", jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL    (IP)(IC), $"null"
	self.Sjmp("JNE", "_not_null_quote_{n}")                       // JNE     _not_null_quote_{n}
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))      // CMPB    4(IP)(IC), $'"'
	self.Emit("CMOVQEQ", _AX, _IC)                                // CMOVQEQ AX, IC — consume `null"` only on match
	self.Xjmp("JE", p.vi())                                       // JE      {p.vi()}
	self.Link("_not_null_quote_{n}")                              // _not_null_quote_{n}:
}
  1478  
// _asm_OP_map_init ensures the map at (VP) is non-nil — allocating one via
// makemap_small if needed — and then points VP at the map itself (the hmap),
// so subsequent map-key ops can assign into it.
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
	self.Emit("TESTQ", _AX, _AX)               // TESTQ   AX, AX
	self.Sjmp("JNZ", "_end_{n}")               // JNZ     _end_{n} — already allocated
	self.call_go(_F_makemap_small)             // CALL_GO makemap_small
	self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ    AX, (VP) — with write barrier
	self.Link("_end_{n}")                      // _end_{n}:
	self.Emit("MOVQ", _AX, _VP)                // MOVQ    AX, VP — VP now holds the map pointer
}
  1488  
// _asm_OP_map_key_i8 decodes a quoted int8 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
	self.parse_signed(int8Type, "", p.vi())                            // PARSE     int8
	self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE     int8
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv
}
  1495  
// _asm_OP_map_key_i16 decodes a quoted int16 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
	self.parse_signed(int16Type, "", p.vi())                               // PARSE     int16
	self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE     int16
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv
}
  1502  
  1503  func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
  1504  	self.parse_signed(int32Type, "", p.vi())                               // PARSE     int32
  1505  	self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE     int32
  1506  	self.match_char('"')
  1507  	if vt := p.vt(); !mapfast(vt) {
  1508  		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv
  1509  	} else {
  1510  		self.Emit("MOVQ", _CX, _AX)                   // MOVQ CX, AX
  1511  		self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32
  1512  	}
  1513  }
  1514  
  1515  func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
  1516  	self.parse_signed(int64Type, "", p.vi()) // PARSE     int64
  1517  	self.match_char('"')
  1518  	if vt := p.vt(); !mapfast(vt) {
  1519  		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv
  1520  	} else {
  1521  		self.Emit("MOVQ", _VAR_st_Iv, _AX)            // MOVQ      st.Iv, AX
  1522  		self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64
  1523  	}
  1524  }
  1525  
// _asm_OP_map_key_u8 decodes a quoted uint8 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
	self.parse_unsigned(uint8Type, "", p.vi())                // PARSE     uint8
	self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8) // RANGE     uint8
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint8, vt.Iv
}
  1532  
// _asm_OP_map_key_u16 decodes a quoted uint16 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
	self.parse_unsigned(uint16Type, "", p.vi())                  // PARSE     uint16
	self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16) // RANGE     uint16
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint16, vt.Iv
}
  1539  
  1540  func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
  1541  	self.parse_unsigned(uint32Type, "", p.vi())                  // PARSE     uint32
  1542  	self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32) // RANGE     uint32
  1543  	self.match_char('"')
  1544  	if vt := p.vt(); !mapfast(vt) {
  1545  		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint32, vt.Iv
  1546  	} else {
  1547  		self.Emit("MOVQ", _CX, _AX)                   // MOVQ CX, AX
  1548  		self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32
  1549  	}
  1550  }
  1551  
  1552  func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
  1553  	self.parse_unsigned(uint64Type, "", p.vi()) // PARSE     uint64
  1554  	self.match_char('"')
  1555  	if vt := p.vt(); !mapfast(vt) {
  1556  		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint64, vt.Iv
  1557  	} else {
  1558  		self.Emit("MOVQ", _VAR_st_Iv, _AX)            // MOVQ      st.Iv, AX
  1559  		self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64
  1560  	}
  1561  }
  1562  
// _asm_OP_map_key_f32 decodes a quoted float32 map key (range-checked) and
// inserts it via the generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
	self.parse_number(float32Type, "", p.vi()) // PARSE     NUMBER
	self.range_single_X0()                     // RANGE     float32
	self.Emit("MOVSS", _X0, _VAR_st_Dv)        // MOVSS     X0, st.Dv — spill the checked single
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
  1570  
// _asm_OP_map_key_f64 decodes a quoted float64 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
	self.parse_number(float64Type, "", p.vi()) // PARSE     NUMBER
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
  1576  
  1577  func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
  1578  	self.parse_string()                                 // PARSE     STRING
  1579  	self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
  1580  	if vt := p.vt(); !mapfast(vt) {
  1581  		self.valloc(vt.Key(), _DI)
  1582  		self.Emit("MOVOU", _ARG_sv, _X0)
  1583  		self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
  1584  		self.mapassign_std(vt, jit.Ptr(_DI, 0)) // MAPASSIGN string, DI, SI
  1585  	} else {
  1586  		self.mapassign_str_fast(vt, _ARG_sv_p, _ARG_sv_n) // MAPASSIGN string, DI, SI
  1587  	}
  1588  }
  1589  
// _asm_OP_map_key_utext decodes a string map key whose type implements
// encoding.TextUnmarshaler (value receiver) and inserts it.
func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
	self.parse_string()                                 // PARSE     STRING
	self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
	self.mapassign_utext(p.vt(), false)                 // MAPASSIGN utext, ${p.vt()}, false
}
  1595  
// _asm_OP_map_key_utext_p decodes a string map key whose type implements
// encoding.TextUnmarshaler with a pointer receiver and inserts it.
func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
	self.parse_string()                                 // PARSE     STRING
	self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
	self.mapassign_utext(p.vt(), true)                  // MAPASSIGN utext, ${p.vt()}, true
}
  1601  
// _asm_OP_array_skip skips over a whole JSON array in the input, raising a
// parsing error when the native skipper reports failure (negative result).
func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
	self.call_sf(_F_skip_array)          // CALL_SF skip_array
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
}
  1607  
// _asm_OP_array_clear zeroes the remaining (undecoded) tail of a fixed-size
// array; the instruction's i64 carries the byte size.
func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
	self.mem_clear_rem(p.i64(), true)
}
  1611  
// _asm_OP_array_clear_p is the pointer-element variant of
// _asm_OP_array_clear (passes false to mem_clear_rem).
func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
	self.mem_clear_rem(p.i64(), false)
}
  1615  
// _asm_OP_slice_init resets the slice at VP to length zero, allocating a
// backing array of capacity _MinSlice via makeslice when the slice has no
// capacity yet.
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
	self.Emit("XORL", _AX, _AX)                // XORL    AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP) — len = 0
	self.Emit("MOVQ", jit.Ptr(_VP, 16), _BX)   // MOVQ    16(VP), BX
	self.Emit("TESTQ", _BX, _BX)               // TESTQ   BX, BX — already has capacity?
	self.Sjmp("JNZ", "_done_{n}")              // JNZ     _done_{n}
	self.Emit("MOVQ", jit.Imm(_MinSlice), _CX) // MOVQ    ${_MinSlice}, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_VP, 16))   // MOVQ    CX, 16(VP) — cap = _MinSlice
	self.Emit("MOVQ", jit.Type(p.vt()), _AX)   // MOVQ    ${p.vt()}, AX
	self.call_go(_F_makeslice)                 // CALL_GO makeslice
	self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ    AX, (VP) — with write barrier
	self.Emit("XORL", _AX, _AX)                // XORL    AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP)
	self.Link("_done_{n}")                     // _done_{n}
}
  1631  
  1632  func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
  1633  	rbracket := p.vb()
  1634  	if rbracket == ']' {
  1635  		self.check_eof(1)
  1636  		self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
  1637  		self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB    (IP)(IC), ']'
  1638  		self.Sjmp("JNE", "_not_empty_array_{n}")                             // JNE     _not_empty_array_{n}
  1639  		self.Emit("MOVQ", _AX, _IC)                                          // MOVQ    AX, IC
  1640  		self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
  1641  		self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
  1642  		self.Emit("PXOR", _X0, _X0)              // PXOR    X0, X0
  1643  		self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU   X0, 8(VP)
  1644  		self.Xjmp("JMP", p.vi())                 // JMP     {p.vi()}
  1645  		self.Link("_not_empty_array_{n}")
  1646  	} else {
  1647  		panic("only implement check empty array here!")
  1648  	}
  1649  }
  1650  
// _asm_OP_slice_append makes room for one more element in the slice at VP:
// if len == cap it calls growslice (doubling the capacity), then bumps len
// and points VP at the new element's slot so the following ops decode into it.
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 8), _AX)    // MOVQ    8(VP), AX
	self.Emit("CMPQ", _AX, jit.Ptr(_VP, 16))   // CMPQ    AX, 16(VP) — len < cap?
	self.Sjmp("JB", "_index_{n}")              // JB      _index_{n}
	self.Emit("MOVQ", _AX, _SI)                // MOVQ    AX, SI
	self.Emit("SHLQ", jit.Imm(1), _SI)         // SHLQ    $1, SI — request double the length
	self.Emit("MOVQ", jit.Type(p.vt()), _AX)   // MOVQ    ${p.vt()}, AX
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _BX)    // MOVQ   (VP), BX
	self.Emit("MOVQ", jit.Ptr(_VP, 8), _CX)    // MOVQ    8(VP), CX
	self.Emit("MOVQ", jit.Ptr(_VP, 16), _DI)   // MOVQ    16(VP), DI
	self.call_go(_F_growslice)                 // CALL_GO growslice
	self.WritePtrAX(8, jit.Ptr(_VP, 0), false) // MOVQ    AX, (VP) — with write barrier
	self.Emit("MOVQ", _BX, jit.Ptr(_VP, 8))    // MOVQ    BX, 8(VP)
	self.Emit("MOVQ", _CX, jit.Ptr(_VP, 16))   // MOVQ    CX, 16(VP)

	// growslice does not zero the region between the old length and the new
	// capacity when the element type has no pointer data, so zero it here to
	// avoid decoding stale bytes as values.
	if rt.UnpackType(p.vt()).PtrData == 0 {
		self.Emit("MOVQ", _CX, _DI) // MOVQ    CX, DI
		self.Emit("SUBQ", _BX, _DI) // SUBQ    BX, DI — DI = cap - len (elements to clear)

		self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_VP, 8))   // ADDQ    $1, 8(VP) — len++
		self.Emit("MOVQ", _AX, _VP)                      // MOVQ    AX, VP
		self.Emit("MOVQ", jit.Imm(int64(p.vlen())), _CX) // MOVQ    ${p.vlen()}, CX — element size
		self.Emit("MOVQ", _BX, _AX)                      // MOVQ    BX, AX
		self.From("MULQ", _CX)                           // MULQ    CX — AX = len * elemsize
		self.Emit("ADDQ", _AX, _VP)                      // ADDQ    AX, VP — VP -> first stale element

		self.Emit("MOVQ", _DI, _AX) // MOVQ    DI, AX
		self.From("MULQ", _CX)      // MULQ    CX — AX = bytes to clear
		self.Emit("MOVQ", _AX, _BX) // MOVQ    AX, BX
		self.Emit("MOVQ", _VP, _AX) // MOVQ    VP, AX
		self.mem_clear_fn(true)     // CALL_GO memclr{Has,NoHeap}
		self.Sjmp("JMP", "_append_slice_end_{n}")
	}

	self.Emit("MOVQ", _BX, _AX)                      // MOVQ    BX, AX — old len is the new element index
	self.Link("_index_{n}")                          // _index_{n}:
	self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_VP, 8))   // ADDQ    $1, 8(VP) — len++
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _VP)          // MOVQ    (VP), VP
	self.Emit("MOVQ", jit.Imm(int64(p.vlen())), _CX) // MOVQ    ${p.vlen()}, CX — element size
	self.From("MULQ", _CX)                           // MULQ    CX
	self.Emit("ADDQ", _AX, _VP)                      // ADDQ    AX, VP — VP -> new element slot
	self.Link("_append_slice_end_{n}")
}
  1696  
// _asm_OP_object_skip skips over a whole JSON object in the input, raising a
// parsing error when the native skipper reports failure (negative result).
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
	self.call_sf(_F_skip_object)         // CALL_SF skip_object
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
}
  1702  
// _asm_OP_object_next skips one JSON value (an unwanted object member),
// raising a parsing error when the native skipper reports failure.
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
}
  1708  
// _asm_OP_struct_field decodes an object key and resolves it to a struct
// field index in _VAR_sr (-1 if not found). It first probes the precomputed
// FieldMap hash table with linear probing (hash in R9, probe counter in AX,
// table size in CX, bucket base in SI), confirming candidates with memequal;
// on a miss it falls back to a case-insensitive lookup, and finally errors
// out if unknown fields are disallowed via the _F_disable_unknown flag.
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
	assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
	self.Emit("MOVQ", jit.Imm(-1), _AX)                      // MOVQ    $-1, AX
	self.Emit("MOVQ", _AX, _VAR_sr)                          // MOVQ    AX, sr — default: field not found
	self.parse_string()                                      // PARSE   STRING
	self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, false)     // UNQUOTE once, sv.p, sv.n
	self.Emit("LEAQ", _ARG_sv, _AX)                          // LEAQ    sv, AX
	self.Emit("XORL", _BX, _BX)                              // XORL    BX, BX
	self.call_go(_F_strhash)                                 // CALL_GO strhash
	self.Emit("MOVQ", _AX, _R9)                              // MOVQ    AX, R9 — key hash
	self.Emit("MOVQ", jit.Imm(freezeFields(p.vf())), _CX)    // MOVQ    ${p.vf()}, CX
	self.Emit("MOVQ", jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ    FieldMap.b(CX), SI
	self.Emit("MOVQ", jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ    FieldMap.N(CX), CX
	self.Emit("TESTQ", _CX, _CX)                             // TESTQ   CX, CX — empty table?
	self.Sjmp("JZ", "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
	self.Link("_loop_{n}")                                   // _loop_{n}:
	self.Emit("XORL", _DX, _DX)                              // XORL    DX, DX
	self.From("DIVQ", _CX)                                   // DIVQ    CX — DX = probe % N
	self.Emit("LEAQ", jit.Ptr(_DX, 1), _AX)                  // LEAQ    1(DX), AX — next probe index
	self.Emit("SHLQ", jit.Imm(5), _DX)                       // SHLQ    $5, DX — slot * 32 (entry size)
	self.Emit("LEAQ", jit.Sib(_SI, _DX, 1, 0), _DI)          // LEAQ    (SI)(DX), DI
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Hash), _R8)           // MOVQ    FieldEntry.Hash(DI), R8
	self.Emit("TESTQ", _R8, _R8)                             // TESTQ   R8, R8 — empty slot ends the probe
	self.Sjmp("JZ", "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
	self.Emit("CMPQ", _R8, _R9)                              // CMPQ    R8, R9
	self.Sjmp("JNE", "_loop_{n}")                            // JNE     _loop_{n}
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Name+8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX
	self.Emit("CMPQ", _DX, _ARG_sv_n)                        // CMPQ    DX, sv.n — lengths must match
	self.Sjmp("JNE", "_loop_{n}")                            // JNE     _loop_{n}
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_ID), _R8)             // MOVQ    FieldEntry.ID(DI), R8
	self.Emit("MOVQ", _AX, _VAR_ss_AX)                       // MOVQ    AX, ss.AX — spill probe state across the call
	self.Emit("MOVQ", _CX, _VAR_ss_CX)                       // MOVQ    CX, ss.CX
	self.Emit("MOVQ", _SI, _VAR_ss_SI)                       // MOVQ    SI, ss.SI
	self.Emit("MOVQ", _R8, _VAR_ss_R8)                       // MOVQ    R8, ss.R8
	self.Emit("MOVQ", _R9, _VAR_ss_R9)                       // MOVQ    R9, ss.R9
	self.Emit("MOVQ", _ARG_sv_p, _AX)                        // MOVQ    sv.p, AX
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Name), _CX)           // MOVQ    FieldEntry.Name(DI), CX
	self.Emit("MOVQ", _CX, _BX)                              // MOVQ    CX, BX
	self.Emit("MOVQ", _DX, _CX)                              // MOVQ    DX, CX
	self.call_go(_F_memequal)                                // CALL_GO memequal
	self.Emit("MOVB", _AX, _DX)                              // MOVB    AX, DX — comparison result
	self.Emit("MOVQ", _VAR_ss_AX, _AX)                       // MOVQ    ss.AX, AX — restore probe state
	self.Emit("MOVQ", _VAR_ss_CX, _CX)                       // MOVQ    ss.CX, CX
	self.Emit("MOVQ", _VAR_ss_SI, _SI)                       // MOVQ    ss.SI, SI
	self.Emit("MOVQ", _VAR_ss_R9, _R9)                       // MOVQ    ss.R9, R9
	self.Emit("TESTB", _DX, _DX)                             // TESTB   DX, DX
	self.Sjmp("JZ", "_loop_{n}")                             // JZ      _loop_{n} — hash collision, keep probing
	self.Emit("MOVQ", _VAR_ss_R8, _R8)                       // MOVQ    ss.R8, R8
	self.Emit("MOVQ", _R8, _VAR_sr)                          // MOVQ    R8, sr — found: record the field ID
	self.Sjmp("JMP", "_end_{n}")                             // JMP     _end_{n}
	self.Link("_try_lowercase_{n}")                          // _try_lowercase_{n}:
	self.Emit("MOVQ", jit.Imm(referenceFields(p.vf())), _AX) // MOVQ    ${p.vf()}, AX
	self.Emit("MOVQ", _ARG_sv_p, _BX)                        // MOVQ    sv.p, BX
	self.Emit("MOVQ", _ARG_sv_n, _CX)                        // MOVQ    sv.n, CX
	self.call_go(_F_FieldMap_GetCaseInsensitive)             // CALL_GO FieldMap::GetCaseInsensitive
	self.Emit("MOVQ", _AX, _VAR_sr)                          // MOVQ    AX, _VAR_sr
	self.Emit("TESTQ", _AX, _AX)                             // TESTQ   AX, AX
	self.Sjmp("JNS", "_end_{n}")                             // JNS     _end_{n} — found case-insensitively
	self.Emit("BTQ", jit.Imm(_F_disable_unknown), _ARG_fv)   // BTQ     ${_F_disable_unknown}, fv
	self.Sjmp("JC", _LB_field_error)                         // JC      _field_error
	self.Link("_end_{n}")                                    // _end_{n}:
}
  1771  
// _asm_OP_unmarshal dispatches to the type's json.Unmarshaler implementation
// (value form — see unmarshal_json).
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
	self.unmarshal_json(p.vt(), true)
}
  1775  
// _asm_OP_unmarshal_p dispatches to the type's json.Unmarshaler
// implementation (pointer form — see unmarshal_json).
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
	self.unmarshal_json(p.vt(), false)
}
  1779  
// _asm_OP_unmarshal_text dispatches to the type's encoding.TextUnmarshaler
// implementation (value form — see unmarshal_text).
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
	self.unmarshal_text(p.vt(), true)
}
  1783  
// _asm_OP_unmarshal_text_p dispatches to the type's encoding.TextUnmarshaler
// implementation (pointer form — see unmarshal_text).
func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
	self.unmarshal_text(p.vt(), false)
}
  1787  
// _asm_OP_lspace skips leading whitespace, using "_{n}" to make the emitted
// labels unique per instruction.
func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
	self.lspace("_{n}")
}
  1791  
// lspace emits code that advances IC past JSON whitespace (matched via the
// _BM_space bitmap in DX). It inlines the test for up to 4 characters, then
// hands longer runs to the native lspace routine; EOF anywhere is an error.
func (self *_Assembler) lspace(subfix string) {
	var label = "_lspace" + subfix
	self.Emit("CMPQ", _IC, _IL)                        // CMPQ    IC, IL
	self.Sjmp("JAE", _LB_eof_error)                    // JAE     _eof_error
	self.Emit("MOVQ", jit.Imm(_BM_space), _DX)         // MOVQ    _BM_space, DX
	self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
	self.Emit("CMPQ", _AX, jit.Imm(' '))               // CMPQ    AX, $' '
	self.Sjmp("JA", label)                             // JA      _lspace{subfix} — not whitespace, done
	self.Emit("BTQ", _AX, _DX)                         // BTQ     AX, DX — test the whitespace bitmap
	self.Sjmp("JNC", label)                            // JNC     _lspace{subfix}

	/* test up to 4 characters */
	for i := 0; i < 3; i++ {
		self.Emit("ADDQ", jit.Imm(1), _IC)                 // ADDQ    $1, IC
		self.Emit("CMPQ", _IC, _IL)                        // CMPQ    IC, IL
		self.Sjmp("JAE", _LB_eof_error)                    // JAE     _eof_error
		self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
		self.Emit("CMPQ", _AX, jit.Imm(' '))               // CMPQ    AX, $' '
		self.Sjmp("JA", label)                             // JA      _lspace{subfix}
		self.Emit("BTQ", _AX, _DX)                         // BTQ     AX, DX
		self.Sjmp("JNC", label)                            // JNC     _lspace{subfix}
	}

	/* hand over to the native function */
	self.Emit("MOVQ", _IP, _DI)          // MOVQ    IP, DI
	self.Emit("MOVQ", _IL, _SI)          // MOVQ    IL, SI
	self.Emit("MOVQ", _IC, _DX)          // MOVQ    IC, DX
	self.callc(_F_lspace)                // CALL    lspace
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parsing_error_v
	self.Emit("CMPQ", _AX, _IL)          // CMPQ    AX, IL
	self.Sjmp("JAE", _LB_eof_error)      // JAE     _eof_error
	self.Emit("MOVQ", _AX, _IC)          // MOVQ    AX, IC
	self.Link(label)                     // _lspace{subfix}:
}
  1827  
// _asm_OP_match_char requires the instruction's byte at the cursor, erroring
// out otherwise.
func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
	self.match_char(p.vb())
}
  1831  
// match_char emits code that consumes exactly the given byte at the current
// cursor, jumping to the char-0 error handler on mismatch.
func (self *_Assembler) match_char(char byte) {
	self.check_eof(1)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()}
	self.Sjmp("JNE", _LB_char_0_error)                               // JNE  _char_0_error
	self.Emit("ADDQ", jit.Imm(1), _IC)                               // ADDQ $1, IC
}
  1838  
// _asm_OP_check_char branches to the instruction's target when the byte at
// the cursor matches, consuming it (branch-free via CMOVQEQ); otherwise it
// falls through with IC unchanged.
func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
	self.check_eof(1)
	self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                            // LEAQ    1(IC), AX
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB    (IP)(IC), ${p.vb()}
	self.Emit("CMOVQEQ", _AX, _IC)                                     // CMOVQEQ AX, IC — consume only on match
	self.Xjmp("JE", p.vi())                                            // JE      {p.vi()}
}
  1846  
// _asm_OP_check_char_0 branches to the instruction's target when the byte at
// the cursor matches, WITHOUT consuming it.
func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
	self.check_eof(1)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB    (IP)(IC), ${p.vb()}
	self.Xjmp("JE", p.vi())                                            // JE      {p.vi()}
}
  1852  
// _asm_OP_add advances the input cursor by the instruction's constant.
func (self *_Assembler) _asm_OP_add(p *_Instr) {
	self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC
}
  1856  
// _asm_OP_load reloads VP from the top of the decoder's pointer stack
// ((ST) holds the current stack offset).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ (ST), AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP
}
  1861  
// _asm_OP_save pushes VP onto the decoder's pointer stack, erroring out when
// the stack offset has reached _MaxStackBytes.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)                           // MOVQ (ST), CX
	self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))                   // CMPQ CX, ${_MaxStackBytes}
	self.Sjmp("JAE", _LB_stack_error)                                 // JAE  _stack_error
	self.WriteRecNotAX(0, _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX) — with write barrier
	self.Emit("ADDQ", jit.Imm(8), _CX)                                // ADDQ $8, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))                           // MOVQ CX, (ST)
}
  1870  
// _asm_OP_drop pops one pointer from the decoder's pointer stack into VP and
// zeroes the vacated slot (so the GC does not see a stale pointer).
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ (ST), AX
	self.Emit("SUBQ", jit.Imm(8), _AX)              // SUBQ $8, AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))         // MOVQ AX, (ST)
	self.Emit("XORL", _BX, _BX)                     // XORL BX, BX
	self.Emit("MOVQ", _BX, jit.Sib(_ST, _AX, 1, 8)) // MOVQ BX, 8(ST)(AX) — clear the slot
}
  1879  
// _asm_OP_drop_2 pops two pointers from the decoder's pointer stack (VP gets
// the lower one) and zeroes both vacated slots with one 16-byte store.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)          // MOVQ  (ST), AX
	self.Emit("SUBQ", jit.Imm(16), _AX)              // SUBQ  $16, AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)  // MOVQ  8(ST)(AX), VP
	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))          // MOVQ  AX, (ST)
	self.Emit("PXOR", _X0, _X0)                      // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) — clear both slots
}
  1888  
// _asm_OP_recurse decodes the instruction's type through the generic
// (dynamic) decoder path instead of inlined code — used to break compile-time
// recursion for self-referential types.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
	self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ   ${p.vt()}, AX
	self.decode_dynamic(_AX, _VP)            // DECODE AX, VP
}
  1893  
// _asm_OP_goto emits an unconditional jump to the instruction's target.
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
	self.Xjmp("JMP", p.vi())
}
  1897  
// _asm_OP_switch dispatches on the field index in _VAR_sr through a PC-relative
// jump table of 32-bit offsets: out-of-range indices (including the -1
// "not found" sentinel, which is above the table size as an unsigned value)
// fall through to the default case.
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
	self.Emit("MOVQ", _VAR_sr, _AX)          // MOVQ sr, AX
	self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())}
	self.Sjmp("JAE", "_default_{n}")         // JAE  _default_{n} — unsigned compare also catches -1
	/* jump table selector */
	self.Byte(0x48, 0x8d, 0x3d)                        // LEAQ    ?(PC), DI
	self.Sref("_switch_table_{n}", 4)                  // ....    &_switch_table_{n}
	self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX — sign-extend the 32-bit offset
	self.Emit("ADDQ", _DI, _AX)                        // ADDQ    DI, AX — absolute target address
	self.Rjmp("JMP", _AX)                              // JMP     AX
	self.Link("_switch_table_{n}")                     // _switch_table_{n}:

	/* generate the jump table */
	for i, v := range p.vs() {
		self.Xref(v, int64(-i)*4)
	}

	/* default case */
	self.Link("_default_{n}")
	self.NOP()
}
  1920  
// print_gc emits a debug call to println with the instruction index and the
// opcodes of two instructions (register-ABI argument order: AX, BX, CX).
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
	self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX) // MOVQ $(p2.op()), CX
	self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX) // MOVQ $(p1.op()), BX
	self.Emit("MOVQ", jit.Imm(int64(i)), _AX)       // MOVQ $(i), AX
	self.call_go(_F_println)
}