github.com/goshafaq/sonic@v0.0.0-20231026082336-871835fb94c6/internal/decoder/assembler_stkabi_amd64.go (about)

     1  //go:build go1.16 && !go1.17
     2  // +build go1.16,!go1.17
     3  
     4  /*
     5   * Copyright 2021 ByteDance Inc.
     6   *
     7   * Licensed under the Apache License, Version 2.0 (the "License");
     8   * you may not use this file except in compliance with the License.
     9   * You may obtain a copy of the License at
    10   *
    11   *     http://www.apache.org/licenses/LICENSE-2.0
    12   *
    13   * Unless required by applicable law or agreed to in writing, software
    14   * distributed under the License is distributed on an "AS IS" BASIS,
    15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    16   * See the License for the specific language governing permissions and
    17   * limitations under the License.
    18   */
    19  
    20  package decoder
    21  
    22  import (
    23  	"encoding/json"
    24  	"fmt"
    25  	"math"
    26  	"reflect"
    27  	"unsafe"
    28  
    29  	"github.com/goshafaq/sonic/internal/caching"
    30  	"github.com/goshafaq/sonic/internal/jit"
    31  	"github.com/goshafaq/sonic/internal/native"
    32  	"github.com/goshafaq/sonic/internal/native/types"
    33  	"github.com/goshafaq/sonic/internal/rt"
    34  	"github.com/twitchyliquid64/golang-asm/obj"
    35  )
    36  
    37  /** Register Allocations
    38   *
    39   *  State Registers:
    40   *
    41   *      %rbx : stack base
    42   *      %r12 : input pointer
    43   *      %r13 : input length
    44   *      %r14 : input cursor
    45   *      %r15 : value pointer
    46   *
    47   *  Error Registers:
    48   *
    49   *      %r10 : error type register
    50   *      %r11 : error pointer register
    51   */
    52  
/** Function Prototype & Stack Map
 *
 *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
 *
 *  s.buf  :   (FP)
 *  s.len  :  8(FP)
 *  ic     : 16(FP)
 *  vp     : 24(FP)
 *  sb     : 32(FP)
 *  fv     : 40(FP)
 *  sv     : 48(FP)
 *  vk     : 64(FP)    (scratch slot, not a declared parameter)
 *  rc     : 72(FP)
 *  err.vt : 80(FP)
 *  err.vp : 88(FP)
 */
    67  
// Stack-frame budget of the generated decoder function. With the
// stack-based ABI (go1.16) every argument / return value and every
// cross-call spill lives in this frame.
const (
	_FP_args   = 96  // 96 bytes to pass arguments and return values for this function
	_FP_fargs  = 80  // 80 bytes for passing arguments to other Go functions
	_FP_saves  = 40  // 40 bytes for saving the registers before CALL instructions
	_FP_locals = 144 // 144 bytes for local variables
)

const (
	_FP_offs = _FP_fargs + _FP_saves + _FP_locals
	_FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
	_FP_base = _FP_size + 8 // 8 bytes for the return address
)

// Little-endian 32-bit immediates used to match JSON literals four
// bytes at a time with a single CMPL.
const (
	_IM_null = 0x6c6c756e // 'null'
	_IM_true = 0x65757274 // 'true'
	_IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')
)

// Bitmask of the ASCII whitespace bytes (bit n set <=> byte n is JSON whitespace).
const (
	_BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)

const (
	_MODE_JSON = 1 << 3 // base64 mode flag: decode per JSON string rules
)
    94  
// Branch-target label names shared between the code generators and the
// out-of-line error handlers emitted at the end of each function.
const (
	_LB_error           = "_error"
	_LB_im_error        = "_im_error"
	_LB_eof_error       = "_eof_error"
	_LB_type_error      = "_type_error"
	_LB_field_error     = "_field_error"
	_LB_range_error     = "_range_error"
	_LB_stack_error     = "_stack_error"
	_LB_base64_error    = "_base64_error"
	_LB_unquote_error   = "_unquote_error"
	_LB_parsing_error   = "_parsing_error"
	_LB_parsing_error_v = "_parsing_error_v"
	_LB_mismatch_error  = "_mismatch_error"
)

// Labels that report an "invalid char" at a fixed offset relative to
// the current cursor (0..4 forward, m2/m3 backward).
const (
	_LB_char_0_error  = "_char_0_error"
	_LB_char_1_error  = "_char_1_error"
	_LB_char_2_error  = "_char_2_error"
	_LB_char_3_error  = "_char_3_error"
	_LB_char_4_error  = "_char_4_error"
	_LB_char_m2_error = "_char_m2_error"
	_LB_char_m3_error = "_char_m3_error"
)

// Labels of the shared value-skipping subroutines.
const (
	_LB_skip_one       = "_skip_one"
	_LB_skip_key_value = "_skip_key_value"
)
   124  
// Scratch registers used freely between instructions.
var (
	_AX = jit.Reg("AX")
	_CX = jit.Reg("CX")
	_DX = jit.Reg("DX")
	_DI = jit.Reg("DI")
	_SI = jit.Reg("SI")
	_BP = jit.Reg("BP")
	_SP = jit.Reg("SP")
	_R8 = jit.Reg("R8")
	_R9 = jit.Reg("R9")
	_X0 = jit.Reg("X0")
	_X1 = jit.Reg("X1")
)

// State registers, live across the whole generated function
// (see the register-allocation comment at the top of the file).
var (
	_ST = jit.Reg("BX")  // stack base
	_IP = jit.Reg("R12") // input pointer
	_IL = jit.Reg("R13") // input length
	_IC = jit.Reg("R14") // input cursor
	_VP = jit.Reg("R15") // value pointer
)

var (
	_R10 = jit.Reg("R10") // used for gcWriteBarrier
	_DF  = jit.Reg("R10") // reuse R10 in generic decoder for flags
	_ET  = jit.Reg("R10") // error type register
	_EP  = jit.Reg("R11") // error pointer register
)
   153  
// Argument slots, SP-relative (offset _FP_base+k corresponds to k(FP)).
// _ARG_s aliases _ARG_sp: the string header starts at the s.buf slot.
// (Package-level initialization order makes the forward reference legal.)
var (
	_ARG_s  = _ARG_sp
	_ARG_sp = jit.Ptr(_SP, _FP_base)      // s.buf
	_ARG_sl = jit.Ptr(_SP, _FP_base+8)    // s.len
	_ARG_ic = jit.Ptr(_SP, _FP_base+16)   // ic
	_ARG_vp = jit.Ptr(_SP, _FP_base+24)   // vp
	_ARG_sb = jit.Ptr(_SP, _FP_base+32)   // sb
	_ARG_fv = jit.Ptr(_SP, _FP_base+40)   // fv
)

// The sv string argument (pointer + length) and the map-key scratch slot.
var (
	_VAR_sv   = _VAR_sv_p
	_VAR_sv_p = jit.Ptr(_SP, _FP_base+48)
	_VAR_sv_n = jit.Ptr(_SP, _FP_base+56)
	_VAR_vk   = jit.Ptr(_SP, _FP_base+64)
)

// Return-value slots: (rc int, err error).
var (
	_RET_rc = jit.Ptr(_SP, _FP_base+72)
	_RET_et = jit.Ptr(_SP, _FP_base+80)
	_RET_ep = jit.Ptr(_SP, _FP_base+88)
)

// Local variables live above the spill area (_FP_fargs+_FP_saves).
// _VAR_st aliases the first field (Vt) of the native StateMachine value.
var (
	_VAR_st = _VAR_st_Vt
	_VAR_sr = jit.Ptr(_SP, _FP_fargs+_FP_saves)
)

// Fields of the native types.JsonState scratch value.
var (
	_VAR_st_Vt = jit.Ptr(_SP, _FP_fargs+_FP_saves+0)
	_VAR_st_Dv = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
	_VAR_st_Iv = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
	_VAR_st_Ep = jit.Ptr(_SP, _FP_fargs+_FP_saves+24)
	_VAR_st_Db = jit.Ptr(_SP, _FP_fargs+_FP_saves+32)
	_VAR_st_Dc = jit.Ptr(_SP, _FP_fargs+_FP_saves+40)
)

// Register spill slots used by the escape_string subroutines.
var (
	_VAR_ss_AX = jit.Ptr(_SP, _FP_fargs+_FP_saves+48)
	_VAR_ss_CX = jit.Ptr(_SP, _FP_fargs+_FP_saves+56)
	_VAR_ss_SI = jit.Ptr(_SP, _FP_fargs+_FP_saves+64)
	_VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs+_FP_saves+72)
	_VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs+_FP_saves+80)
)

// Saved byte-slice header and link register for the copy_string subroutines.
var (
	_VAR_bs_p  = jit.Ptr(_SP, _FP_fargs+_FP_saves+88)
	_VAR_bs_n  = jit.Ptr(_SP, _FP_fargs+_FP_saves+96)
	_VAR_bs_LR = jit.Ptr(_SP, _FP_fargs+_FP_saves+104)
)

var _VAR_fl = jit.Ptr(_SP, _FP_fargs+_FP_saves+112)

var (
	_VAR_et = jit.Ptr(_SP, _FP_fargs+_FP_saves+120) // save dismatched type
	_VAR_ic = jit.Ptr(_SP, _FP_fargs+_FP_saves+128) // save dismatched position
	_VAR_pc = jit.Ptr(_SP, _FP_fargs+_FP_saves+136) // save skip return pc
)
   212  
// _Assembler translates a decoder _Program into executable machine code
// via the embedded jit.BaseAssembler.
type _Assembler struct {
	jit.BaseAssembler
	p    _Program // instruction sequence to compile
	name string   // symbol suffix used for the generated function name
}
   218  
   219  func newAssembler(p _Program) *_Assembler {
   220  	return new(_Assembler).Init(p)
   221  }
   222  
   223  /** Assembler Interface **/
   224  
   225  func (self *_Assembler) Load() _Decoder {
   226  	return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
   227  }
   228  
// Init stores the program and registers self.compile as the lazy
// compilation callback; it returns self for call chaining.
func (self *_Assembler) Init(p _Program) *_Assembler {
	self.p = p
	self.BaseAssembler.Init(self.compile)
	return self
}
   234  
// compile emits the whole function body: prologue, one code fragment per
// program instruction, the epilogue, then the shared out-of-line
// subroutines and error handlers that the inline fragments jump to.
// The emission order determines label layout and must not be changed.
func (self *_Assembler) compile() {
	self.prologue()
	self.instrs()
	self.epilogue()
	self.copy_string()
	self.escape_string()
	self.escape_string_twice()
	self.skip_one()
	self.skip_key_value()
	self.mismatch_error()
	self.type_error()
	self.field_error()
	self.range_error()
	self.stack_error()
	self.base64_error()
	self.parsing_error()
}
   252  
   253  /** Assembler Stages **/
   254  
// _OpFuncTab maps each opcode to the method that emits its machine code.
// Entries left nil are invalid opcodes (caught by instr with a panic).
var _OpFuncTab = [256]func(*_Assembler, *_Instr){
	_OP_any:              (*_Assembler)._asm_OP_any,
	_OP_dyn:              (*_Assembler)._asm_OP_dyn,
	_OP_str:              (*_Assembler)._asm_OP_str,
	_OP_bin:              (*_Assembler)._asm_OP_bin,
	_OP_bool:             (*_Assembler)._asm_OP_bool,
	_OP_num:              (*_Assembler)._asm_OP_num,
	_OP_i8:               (*_Assembler)._asm_OP_i8,
	_OP_i16:              (*_Assembler)._asm_OP_i16,
	_OP_i32:              (*_Assembler)._asm_OP_i32,
	_OP_i64:              (*_Assembler)._asm_OP_i64,
	_OP_u8:               (*_Assembler)._asm_OP_u8,
	_OP_u16:              (*_Assembler)._asm_OP_u16,
	_OP_u32:              (*_Assembler)._asm_OP_u32,
	_OP_u64:              (*_Assembler)._asm_OP_u64,
	_OP_f32:              (*_Assembler)._asm_OP_f32,
	_OP_f64:              (*_Assembler)._asm_OP_f64,
	_OP_unquote:          (*_Assembler)._asm_OP_unquote,
	_OP_nil_1:            (*_Assembler)._asm_OP_nil_1,
	_OP_nil_2:            (*_Assembler)._asm_OP_nil_2,
	_OP_nil_3:            (*_Assembler)._asm_OP_nil_3,
	_OP_deref:            (*_Assembler)._asm_OP_deref,
	_OP_index:            (*_Assembler)._asm_OP_index,
	_OP_is_null:          (*_Assembler)._asm_OP_is_null,
	_OP_is_null_quote:    (*_Assembler)._asm_OP_is_null_quote,
	_OP_map_init:         (*_Assembler)._asm_OP_map_init,
	_OP_map_key_i8:       (*_Assembler)._asm_OP_map_key_i8,
	_OP_map_key_i16:      (*_Assembler)._asm_OP_map_key_i16,
	_OP_map_key_i32:      (*_Assembler)._asm_OP_map_key_i32,
	_OP_map_key_i64:      (*_Assembler)._asm_OP_map_key_i64,
	_OP_map_key_u8:       (*_Assembler)._asm_OP_map_key_u8,
	_OP_map_key_u16:      (*_Assembler)._asm_OP_map_key_u16,
	_OP_map_key_u32:      (*_Assembler)._asm_OP_map_key_u32,
	_OP_map_key_u64:      (*_Assembler)._asm_OP_map_key_u64,
	_OP_map_key_f32:      (*_Assembler)._asm_OP_map_key_f32,
	_OP_map_key_f64:      (*_Assembler)._asm_OP_map_key_f64,
	_OP_map_key_str:      (*_Assembler)._asm_OP_map_key_str,
	_OP_map_key_utext:    (*_Assembler)._asm_OP_map_key_utext,
	_OP_map_key_utext_p:  (*_Assembler)._asm_OP_map_key_utext_p,
	_OP_array_skip:       (*_Assembler)._asm_OP_array_skip,
	_OP_array_clear:      (*_Assembler)._asm_OP_array_clear,
	_OP_array_clear_p:    (*_Assembler)._asm_OP_array_clear_p,
	_OP_slice_init:       (*_Assembler)._asm_OP_slice_init,
	_OP_slice_append:     (*_Assembler)._asm_OP_slice_append,
	_OP_object_skip:      (*_Assembler)._asm_OP_object_skip,
	_OP_object_next:      (*_Assembler)._asm_OP_object_next,
	_OP_struct_field:     (*_Assembler)._asm_OP_struct_field,
	_OP_unmarshal:        (*_Assembler)._asm_OP_unmarshal,
	_OP_unmarshal_p:      (*_Assembler)._asm_OP_unmarshal_p,
	_OP_unmarshal_text:   (*_Assembler)._asm_OP_unmarshal_text,
	_OP_unmarshal_text_p: (*_Assembler)._asm_OP_unmarshal_text_p,
	_OP_lspace:           (*_Assembler)._asm_OP_lspace,
	_OP_match_char:       (*_Assembler)._asm_OP_match_char,
	_OP_check_char:       (*_Assembler)._asm_OP_check_char,
	_OP_load:             (*_Assembler)._asm_OP_load,
	_OP_save:             (*_Assembler)._asm_OP_save,
	_OP_drop:             (*_Assembler)._asm_OP_drop,
	_OP_drop_2:           (*_Assembler)._asm_OP_drop_2,
	_OP_recurse:          (*_Assembler)._asm_OP_recurse,
	_OP_goto:             (*_Assembler)._asm_OP_goto,
	_OP_switch:           (*_Assembler)._asm_OP_switch,
	_OP_check_char_0:     (*_Assembler)._asm_OP_check_char_0,
	_OP_dismatch_err:     (*_Assembler)._asm_OP_dismatch_err,
	_OP_go_skip:          (*_Assembler)._asm_OP_go_skip,
	_OP_add:              (*_Assembler)._asm_OP_add,
	_OP_check_empty:      (*_Assembler)._asm_OP_check_empty,
}
   322  
   323  func (self *_Assembler) instr(v *_Instr) {
   324  	if fn := _OpFuncTab[v.op()]; fn != nil {
   325  		fn(self, v)
   326  	} else {
   327  		panic(fmt.Sprintf("invalid opcode: %d", v.op()))
   328  	}
   329  }
   330  
   331  func (self *_Assembler) instrs() {
   332  	for i, v := range self.p {
   333  		self.Mark(i)
   334  		self.instr(&v)
   335  		self.debug_instr(i, &v)
   336  	}
   337  }
   338  
// epilogue emits the function exit: if a deferred type-mismatch was
// recorded in _VAR_et it branches to the mismatch handler, otherwise it
// falls through to the common error/success exit _LB_error, which stores
// (rc, err.vt, err.vp) into the return slots and unwinds the frame.
func (self *_Assembler) epilogue() {
	self.Mark(len(self.p))
	self.Emit("XORL", _EP, _EP)                    // XORL EP, EP
	self.Emit("MOVQ", _VAR_et, _ET)                // MOVQ VAR_et, ET
	self.Emit("TESTQ", _ET, _ET)                   // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_mismatch_error)           // JNZ _LB_mismatch_error
	self.Link(_LB_error)                           // _error:
	self.Emit("MOVQ", _IC, _RET_rc)                // MOVQ IC, rc<>+72(FP)
	self.Emit("MOVQ", _ET, _RET_et)                // MOVQ ET, et<>+80(FP)
	self.Emit("MOVQ", _EP, _RET_ep)                // MOVQ EP, ep<>+88(FP)
	self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
	self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
	self.Emit("RET")                               // RET
}
   353  
// prologue emits the function entry: it opens the stack frame, loads the
// arguments into the dedicated state registers, points the digit buffer
// at the per-stack scratch area, and clears the deferred mismatch slot.
func (self *_Assembler) prologue() {
	self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
	self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
	self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
	self.Emit("MOVQ", _ARG_sp, _IP)                // MOVQ s.p<>+0(FP), IP
	self.Emit("MOVQ", _ARG_sl, _IL)                // MOVQ s.l<>+8(FP), IL
	self.Emit("MOVQ", _ARG_ic, _IC)                // MOVQ ic<>+16(FP), IC
	self.Emit("MOVQ", _ARG_vp, _VP)                // MOVQ vp<>+24(FP), VP
	self.Emit("MOVQ", _ARG_sb, _ST)                // MOVQ sb<>+32(FP), ST
	// initialize digital buffer first
	self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
	self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)     // LEAQ _DbufOffset(ST), AX
	self.Emit("MOVQ", _AX, _VAR_st_Db)                    // MOVQ AX, ss.Dbuf
	self.Emit("XORL", _AX, _AX)                           // XORL AX, AX
	self.Emit("MOVQ", _AX, _VAR_et)                       // MOVQ AX, _VAR_et (no mismatch recorded yet)
}
   370  
   371  /** Function Calling Helpers **/
   372  
// _REG_go lists the state registers that must survive a call into a Go
// function (saved/restored by call_go, at most _FP_saves/8 entries).
var _REG_go = []obj.Addr{
	_ST,
	_VP,
	_IP,
	_IL,
	_IC,
}
   380  
   381  func (self *_Assembler) save(r ...obj.Addr) {
   382  	for i, v := range r {
   383  		if i > _FP_saves/8-1 {
   384  			panic("too many registers to save")
   385  		} else {
   386  			self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
   387  		}
   388  	}
   389  }
   390  
   391  func (self *_Assembler) load(r ...obj.Addr) {
   392  	for i, v := range r {
   393  		if i > _FP_saves/8-1 {
   394  			panic("too many registers to load")
   395  		} else {
   396  			self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
   397  		}
   398  	}
   399  }
   400  
// call emits an indirect call through AX to the given function address.
// AX is clobbered; no registers are saved.
func (self *_Assembler) call(fn obj.Addr) {
	self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
	self.Rjmp("CALL", _AX)     // CALL AX
}
   405  
// call_go calls a Go function, saving and restoring the decoder state
// registers (_REG_go) around the call.
func (self *_Assembler) call_go(fn obj.Addr) {
	self.save(_REG_go...) // SAVE $REG_go
	self.call(fn)         // CALL ${fn}
	self.load(_REG_go...) // LOAD $REG_go
}
   411  
// call_sf calls a native skip-family routine with the C-like register
// convention: DI = &s, SI = &ic, DX = &fsm, CX = fv. The cursor is
// written back through the ic slot and reloaded into IC afterwards.
func (self *_Assembler) call_sf(fn obj.Addr) {
	self.Emit("LEAQ", _ARG_s, _DI)                   // LEAQ s<>+0(FP), DI
	self.Emit("MOVQ", _IC, _ARG_ic)                  // MOVQ IC, ic<>+16(FP)
	self.Emit("LEAQ", _ARG_ic, _SI)                  // LEAQ ic<>+16(FP), SI
	self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX
	self.Emit("MOVQ", _ARG_fv, _CX)                  // MOVQ fv<>+40(FP), CX
	self.call(fn)                   // CALL ${fn}
	self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}
   421  
// call_vf calls a native value-parsing routine: DI = &s, SI = &ic,
// DX = &st (the JsonState scratch area). The cursor is written back
// through the ic slot and reloaded into IC afterwards.
func (self *_Assembler) call_vf(fn obj.Addr) {
	self.Emit("LEAQ", _ARG_s, _DI)  // LEAQ s<>+0(FP), DI
	self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
	self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
	self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX
	self.call(fn)                   // CALL ${fn}
	self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}
   430  
   431  /** Assembler Error Handlers **/
   432  
// Go helper functions callable from generated code.
var (
	_F_convT64        = jit.Func(convT64)
	_F_error_wrap     = jit.Func(error_wrap)
	_F_error_type     = jit.Func(error_type)
	_F_error_field    = jit.Func(error_field)
	_F_error_value    = jit.Func(error_value)
	_F_error_mismatch = jit.Func(error_mismatch)
)

// Interned rtype pairs for the primitive types referenced by range errors.
var (
	_I_int8, _T_int8       = rtype(reflect.TypeOf(int8(0)))
	_I_int16, _T_int16     = rtype(reflect.TypeOf(int16(0)))
	_I_int32, _T_int32     = rtype(reflect.TypeOf(int32(0)))
	_I_uint8, _T_uint8     = rtype(reflect.TypeOf(uint8(0)))
	_I_uint16, _T_uint16   = rtype(reflect.TypeOf(uint16(0)))
	_I_uint32, _T_uint32   = rtype(reflect.TypeOf(uint32(0)))
	_I_float32, _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

// Itab addresses used to build error interface values in registers.
var (
	_T_error                    = rt.UnpackType(errorType)
	_I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

var (
	_V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
	_I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
	_I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)
   462  
// type_error emits the handler that converts the rtype in ET into an
// *UnmarshalTypeError via error_type, then exits through _LB_error.
func (self *_Assembler) type_error() {
	self.Link(_LB_type_error)                // _type_error:
	self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0))  // MOVQ    ET, (SP)
	self.call_go(_F_error_type)              // CALL_GO error_type
	self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET)  // MOVQ    8(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ    16(SP), EP
	self.Sjmp("JMP", _LB_error)              // JMP     _error
}
   471  
// mismatch_error emits the handler for a deferred type mismatch: if the
// recorded _VAR_et already holds a *MismatchTypeError itab the pair
// (ET, EP) is returned as-is; otherwise error_mismatch builds the error
// from the input string, the recorded position and the expected type.
func (self *_Assembler) mismatch_error() {
	self.Link(_LB_mismatch_error)                     // _mismatch_error:
	self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
	self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
	self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
	self.Emit("CMPQ", _ET, _AX)                       // CMPQ ET, AX
	self.Sjmp("JE", _LB_error)                        // JE _LB_error
	self.Emit("MOVQ", _ARG_sp, _AX)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ    AX, (SP)
	self.Emit("MOVQ", _ARG_sl, _CX)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ    CX, 8(SP)
	self.Emit("MOVQ", _VAR_ic, _AX)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ    AX, 16(SP)
	self.Emit("MOVQ", _VAR_et, _CX)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ    CX, 24(SP)
	self.call_go(_F_error_mismatch)          // CALL_GO error_mismatch
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ    40(SP), EP
	self.Sjmp("JMP", _LB_error)              // JMP     _error
}
   492  
// _asm_OP_dismatch_err records a deferred type mismatch: the current
// cursor into _VAR_ic and the expected type into _VAR_et. Decoding
// continues; the epilogue turns the record into a MismatchTypeError.
func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
	self.Emit("MOVQ", _IC, _VAR_ic)
	self.Emit("MOVQ", jit.Type(p.vt()), _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
}
   498  
// _asm_OP_go_skip jumps to the shared skip_one subroutine, stashing the
// continuation address (resolved via Xref to instruction p.vi()) in
// _VAR_pc so skip_one can jump back when done.
func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Xref(p.vi(), 4)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one) // JMP     _skip_one
}
   505  
// skip_one emits the shared subroutine that skips a single JSON value
// starting at _VAR_ic, then jumps back to the address saved in _VAR_pc.
// A negative return from the native skip routine aborts via the
// parsing-error path.
func (self *_Assembler) skip_one() {
	self.Link(_LB_skip_one)              // _skip:
	self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ    _VAR_ic, IC
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	self.Emit("MOVQ", _VAR_pc, _R9)      // MOVQ    pc, R9
	self.Rjmp("JMP", _R9)                // JMP     (R9)
}
   515  
// skip_key_value emits the shared subroutine that skips a whole
// `"key" : value` pair starting at _VAR_ic (key, then the ':' with
// surrounding whitespace, then the value), and jumps back to _VAR_pc.
func (self *_Assembler) skip_key_value() {
	self.Link(_LB_skip_key_value) // _skip:
	// skip the key
	self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ    _VAR_ic, IC
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	// match char ':'
	self.lspace("_global_1")
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
	self.Sjmp("JNE", _LB_parsing_error_v) // JNE     _parse_error_v
	self.Emit("ADDQ", jit.Imm(1), _IC)    // ADDQ    $1, IC
	self.lspace("_global_2")
	// skip the value
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parse_error_v
	// jump back to specified address
	self.Emit("MOVQ", _VAR_pc, _R9) // MOVQ    pc, R9
	self.Rjmp("JMP", _R9)           // JMP     (R9)
}
   537  
// field_error emits the handler that turns the field name saved in the
// sv slot (a 16-byte string header, moved via X0) into an error through
// error_field, then exits via _LB_error.
func (self *_Assembler) field_error() {
	self.Link(_LB_field_error)               // _field_error:
	self.Emit("MOVOU", _VAR_sv, _X0)         // MOVOU   sv, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU   X0, (SP)
	self.call_go(_F_error_field)             // CALL_GO error_field
	self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ    16(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ    24(SP), EP
	self.Sjmp("JMP", _LB_error)              // JMP     _error
}
   547  
// range_error emits the handler for numeric out-of-range values: it
// re-slices the offending literal (from st.Ep to the cursor), passes it
// with the current (ET, EP) pair to error_value, and exits via _LB_error.
func (self *_Assembler) range_error() {
	self.Link(_LB_range_error)               // _range_error:
	self.slice_from(_VAR_st_Ep, 0)           // SLICE   st.Ep, $0
	self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0))  // MOVQ    DI, (SP)
	self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8))  // MOVQ    SI, 8(SP)
	self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ    ET, 16(SP)
	self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ    EP, 24(SP)
	self.call_go(_F_error_value)             // CALL_GO error_value
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ    40(SP), EP
	self.Sjmp("JMP", _LB_error)              // JMP     _error
}
   560  
// stack_error emits the handler for decoder-stack overflow: it loads the
// prebuilt stackOverflow error value and itab into (EP, ET) and exits.
func (self *_Assembler) stack_error() {
	self.Link(_LB_stack_error)                            // _stack_error:
	self.Emit("MOVQ", _V_stackOverflow, _EP)              // MOVQ ${_V_stackOverflow}, EP
	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET
	self.Sjmp("JMP", _LB_error)                           // JMP  _error
}
   567  
// base64_error emits the handler for base64 decode failures: AX holds
// -(pos+1), which is converted back to the byte position, boxed via
// convT64, and returned as a base64.CorruptInputError.
func (self *_Assembler) base64_error() {
	self.Link(_LB_base64_error)
	self.Emit("NEGQ", _AX)                              // NEGQ    AX
	self.Emit("SUBQ", jit.Imm(1), _AX)                  // SUBQ    $1, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))             // MOVQ    AX, (SP)
	self.call_go(_F_convT64)                            // CALL_GO convT64
	self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP)             // MOVQ    8(SP), EP
	self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ    ${itab(base64.CorruptInputError)}, ET
	self.Sjmp("JMP", _LB_error)                         // JMP     _error
}
   578  
// parsing_error emits the cluster of syntax-error handlers. The entry
// labels adjust IC and EP for their specific situation, then all fall
// through to _parsing_error, which calls error_wrap(s, IC, EP) and
// exits via _LB_error. _im_error locates which byte of a 4-byte literal
// immediate (in CX) mismatched; the chained _char_N labels add 1 to IC
// per level so each reports the right offset.
func (self *_Assembler) parsing_error() {
	self.Link(_LB_eof_error)                                       // _eof_error:
	self.Emit("MOVQ", _IL, _IC)                                    // MOVQ    IL, IC
	self.Emit("MOVL", jit.Imm(int64(types.ERR_EOF)), _EP)          // MOVL    ${types.ERR_EOF}, EP
	self.Sjmp("JMP", _LB_parsing_error)                            // JMP     _parsing_error
	self.Link(_LB_unquote_error)                                   // _unquote_error:
	self.Emit("SUBQ", _VAR_sr, _SI)                                // SUBQ    sr, SI
	self.Emit("SUBQ", _SI, _IC)                                    // SUBQ    SI, IC
	self.Link(_LB_parsing_error_v)                                 // _parsing_error_v:
	self.Emit("MOVQ", _AX, _EP)                                    // MOVQ    AX, EP
	self.Emit("NEGQ", _EP)                                         // NEGQ    EP (AX held -errcode)
	self.Sjmp("JMP", _LB_parsing_error)                            // JMP     _parsing_error
	self.Link(_LB_char_m3_error)                                   // _char_m3_error:
	self.Emit("SUBQ", jit.Imm(1), _IC)                             // SUBQ    $1, IC (falls into m2: 3 back in total)
	self.Link(_LB_char_m2_error)                                   // _char_m2_error:
	self.Emit("SUBQ", jit.Imm(2), _IC)                             // SUBQ    $2, IC
	self.Sjmp("JMP", _LB_char_0_error)                             // JMP     _char_0_error
	self.Link(_LB_im_error)                                        // _im_error:
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 0))                // CMPB    CX, (IP)(IC)
	self.Sjmp("JNE", _LB_char_0_error)                             // JNE     _char_0_error
	self.Emit("SHRL", jit.Imm(8), _CX)                             // SHRL    $8, CX
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 1))                // CMPB    CX, 1(IP)(IC)
	self.Sjmp("JNE", _LB_char_1_error)                             // JNE     _char_1_error
	self.Emit("SHRL", jit.Imm(8), _CX)                             // SHRL    $8, CX
	self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 2))                // CMPB    CX, 2(IP)(IC)
	self.Sjmp("JNE", _LB_char_2_error)                             // JNE     _char_2_error
	self.Sjmp("JMP", _LB_char_3_error)                             // JMP     _char_3_error
	self.Link(_LB_char_4_error)                                    // _char_4_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_3_error)                                    // _char_3_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_2_error)                                    // _char_2_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_1_error)                                    // _char_1_error:
	self.Emit("ADDQ", jit.Imm(1), _IC)                             // ADDQ    $1, IC
	self.Link(_LB_char_0_error)                                    // _char_0_error:
	self.Emit("MOVL", jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL    ${types.ERR_INVALID_CHAR}, EP
	self.Link(_LB_parsing_error)                                   // _parsing_error:
	self.Emit("MOVOU", _ARG_s, _X0)                                // MOVOU   s, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))                       // MOVOU   X0, (SP)
	self.Emit("MOVQ", _IC, jit.Ptr(_SP, 16))                       // MOVQ    IC, 16(SP)
	self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24))                       // MOVQ    EP, 24(SP)
	self.call_go(_F_error_wrap)                                    // CALL_GO error_wrap
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)                       // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)                       // MOVQ    40(SP), EP
	self.Sjmp("JMP", _LB_error)                                    // JMP     _error
}
   626  
   627  /** Memory Management Routines **/
   628  
// Runtime helpers used by the memory-allocation routines below.
var (
	_T_byte     = jit.Type(byteType)
	_F_mallocgc = jit.Func(mallocgc)
)
   633  
// malloc emits mallocgc(nb, byteType, false) — a pointer-free (scannable
// = false per the zeroed third argument) allocation of nb bytes — and
// moves the resulting pointer into ret.
func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
	self.Emit("XORL", _AX, _AX)              // XORL    AX, AX
	self.Emit("MOVQ", _T_byte, _CX)          // MOVQ    ${type(byte)}, CX
	self.Emit("MOVQ", nb, jit.Ptr(_SP, 0))   // MOVQ    ${nb}, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))  // MOVQ    CX, 8(SP)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ    AX, 16(SP)
	self.call_go(_F_mallocgc)                // CALL_GO mallocgc
	self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ    24(SP), ${ret}
}
   643  
// valloc emits mallocgc(vt.Size(), vt, true) — a typed, zeroed
// allocation of one value of vt — and moves the pointer into ret.
func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
	self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ    ${vt.Size()}, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))           // MOVQ    AX, (SP)
	self.Emit("MOVQ", jit.Type(vt), _AX)              // MOVQ    ${vt}, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))           // MOVQ    AX, 8(SP)
	self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16))   // MOVB    $1, 16(SP)
	self.call_go(_F_mallocgc)                         // CALL_GO mallocgc
	self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)          // MOVQ    24(SP), ${ret}
}
   653  
// vfollow emits pointer-following: load *VP; if it is nil, allocate a
// fresh vt value and store it through VP (with write barrier); either
// way VP ends up pointing at the pointee.
func (self *_Assembler) vfollow(vt reflect.Type) {
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)    // MOVQ   (VP), AX
	self.Emit("TESTQ", _AX, _AX)               // TESTQ  AX, AX
	self.Sjmp("JNZ", "_end_{n}")               // JNZ    _end_{n}
	self.valloc(vt, _AX)                       // VALLOC ${vt}, AX
	self.WritePtrAX(1, jit.Ptr(_VP, 0), false) // MOVQ   AX, (VP)
	self.Link("_end_{n}")                      // _end_{n}:
	self.Emit("MOVQ", _AX, _VP)                // MOVQ   AX, VP
}
   663  
   664  /** Value Parsing Routines **/
   665  
// Entry addresses of the native value-parsing routines.
var (
	_F_vstring   = jit.Imm(int64(native.S_vstring))
	_F_vnumber   = jit.Imm(int64(native.S_vnumber))
	_F_vsigned   = jit.Imm(int64(native.S_vsigned))
	_F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)
   672  
// check_err emits the post-parse error check: a negative st.Vt signals a
// parse failure. When vt is non-nil the failure is recorded as a
// deferred mismatch (_VAR_et/_VAR_ic, cursor restored from BP, which
// the parse_* callers preload with the pre-parse IC) and the value (or
// key/value pair, when pin2 != -1) is skipped instead of aborting; when
// vt is nil the failure aborts via the parsing-error path.
func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _VAR_st_Vt, _AX) // MOVQ st.Vt, AX
	self.Emit("TESTQ", _AX, _AX)       // TESTQ AX, AX (sign bit = parse error)
	// try to skip the value
	if vt != nil {
		self.Sjmp("JNS", "_check_err_{n}") // JNS  _check_err_{n}
		self.Emit("MOVQ", jit.Type(vt), _ET)
		self.Emit("MOVQ", _ET, _VAR_et)
		if pin2 != -1 {
			self.Emit("SUBQ", jit.Imm(1), _BP)
			self.Emit("MOVQ", _BP, _VAR_ic)
			self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
			self.Xref(pin2, 4)
			self.Emit("MOVQ", _R9, _VAR_pc)
			self.Sjmp("JMP", _LB_skip_key_value)
		} else {
			self.Emit("MOVQ", _BP, _VAR_ic)
			self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
			self.Sref(pin, 4)
			self.Emit("MOVQ", _R9, _VAR_pc)
			self.Sjmp("JMP", _LB_skip_one)
		}
		self.Link("_check_err_{n}")
	} else {
		self.Sjmp("JS", _LB_parsing_error_v) // JS   _parsing_error_v
	}
}
   700  
   701  func (self *_Assembler) check_eof(d int64) {
   702  	if d == 1 {
   703  		self.Emit("CMPQ", _IC, _IL)     // CMPQ IC, IL
   704  		self.Sjmp("JAE", _LB_eof_error) // JAE  _eof_error
   705  	} else {
   706  		self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX
   707  		self.Emit("CMPQ", _AX, _IL)             // CMPQ AX, IL
   708  		self.Sjmp("JA", _LB_eof_error)          // JA   _eof_error
   709  	}
   710  }
   711  
// parse_string emits a call to the native vstring scanner; the decoder's
// flag word (fv) is passed in CX as the validate flag.
func (self *_Assembler) parse_string() { // vstring takes a validate flag as its last parameter
	self.Emit("MOVQ", _ARG_fv, _CX)
	self.call_vf(_F_vstring)
	self.check_err(nil, "", -1)
}
   717  
// parse_number emits a call to the native vnumber scanner. The starting
// cursor is saved in BP so check_err can record the value's position when
// deferring a type-mismatch error.
func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BP)
	self.call_vf(_F_vnumber) // call  vnumber
	self.check_err(vt, pin, pin2)
}
   723  
// parse_signed emits a call to the native vsigned scanner; BP holds the
// starting cursor for check_err's deferred-error position.
func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BP)
	self.call_vf(_F_vsigned)
	self.check_err(vt, pin, pin2)
}
   729  
// parse_unsigned emits a call to the native vunsigned scanner; BP holds the
// starting cursor for check_err's deferred-error position.
func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
	self.Emit("MOVQ", _IC, _BP)
	self.call_vf(_F_vunsigned)
	self.check_err(vt, pin, pin2)
}
   735  
// copy_string is an out-of-line subroutine that copies the string slice
// (pointer in DI, length in SI) into freshly allocated memory via memmove,
// then returns through the address saved in R9.
// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) copy_string() {
	self.Link("_copy_string")
	self.Emit("MOVQ", _DI, _VAR_bs_p)
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc(_SI, _AX)
	self.Emit("MOVQ", _AX, _VAR_sv_p)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
	self.Emit("MOVQ", _VAR_bs_p, _DI)
	self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
	self.call_go(_F_memmove)
	self.Emit("MOVQ", _VAR_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   755  
// escape_string is an out-of-line subroutine that unquotes an escaped string
// (pointer in DI, length in SI) into a freshly allocated buffer, honoring
// the disable-unicode-replacement flag from fv, then returns through the
// address saved in R9 with the result in DI/SI.
// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) escape_string() {
	self.Link("_escape_string")
	self.Emit("MOVQ", _DI, _VAR_bs_p)
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc(_SI, _DX) // MALLOC SI, DX
	self.Emit("MOVQ", _DX, _VAR_sv_p)
	self.Emit("MOVQ", _VAR_bs_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("LEAQ", _VAR_sr, _CX)                          // LEAQ   sr, CX
	self.Emit("XORL", _R8, _R8)                              // XORL   R8, R8
	self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)       // BTQ    ${_F_disable_urc}, fv
	self.Emit("SETCC", _R8)                                  // SETCC  R8
	self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ   ${types.B_UNICODE_REPLACE}, R8
	self.call(_F_unquote)                                    // CALL   unquote
	// SI is set to n+1 before the sign check; presumably consumed by the
	// _unquote_error path — TODO confirm against the error handler.
	self.Emit("MOVQ", _VAR_bs_n, _SI)  // MOVQ   ${n}, SI
	self.Emit("ADDQ", jit.Imm(1), _SI) // ADDQ   $1, SI
	self.Emit("TESTQ", _AX, _AX)       // TESTQ  AX, AX
	self.Sjmp("JS", _LB_unquote_error) // JS     _unquote_error
	self.Emit("MOVQ", _AX, _SI)
	self.Emit("MOVQ", _VAR_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   781  
// escape_string_twice is the doubly-quoted variant of escape_string: it runs
// the native unquote with the F_DOUBLE_UNQUOTE flag (plus the unicode-replace
// flag derived from fv) and returns through R9 with the result in DI/SI.
func (self *_Assembler) escape_string_twice() {
	self.Link("_escape_string_twice")
	self.Emit("MOVQ", _DI, _VAR_bs_p)
	self.Emit("MOVQ", _SI, _VAR_bs_n)
	self.Emit("MOVQ", _R9, _VAR_bs_LR)
	self.malloc(_SI, _DX) // MALLOC SI, DX
	self.Emit("MOVQ", _DX, _VAR_sv_p)
	self.Emit("MOVQ", _VAR_bs_p, _DI)
	self.Emit("MOVQ", _VAR_bs_n, _SI)
	self.Emit("LEAQ", _VAR_sr, _CX)                          // LEAQ   sr, CX
	self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)  // MOVL   ${types.F_DOUBLE_UNQUOTE}, R8
	self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)       // BTQ    ${_F_disable_urc}, fv
	self.Emit("XORL", _AX, _AX)                              // XORL   AX, AX
	self.Emit("SETCC", _AX)                                  // SETCC  AX
	self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ   ${types.B_UNICODE_REPLACE}, AX
	self.Emit("ORQ", _AX, _R8)                               // ORQ    AX, R8
	self.call(_F_unquote)                                    // CALL   unquote
	// SI is set to n+3 before the sign check; presumably consumed by the
	// _unquote_error path — TODO confirm against the error handler.
	self.Emit("MOVQ", _VAR_bs_n, _SI)  // MOVQ   ${n}, SI
	self.Emit("ADDQ", jit.Imm(3), _SI) // ADDQ   $3, SI
	self.Emit("TESTQ", _AX, _AX)       // TESTQ  AX, AX
	self.Sjmp("JS", _LB_unquote_error) // JS     _unquote_error
	self.Emit("MOVQ", _AX, _SI)
	self.Emit("MOVQ", _VAR_sv_p, _DI)
	self.Emit("MOVQ", _VAR_bs_LR, _R9)
	self.Rjmp("JMP", _R9)
}
   808  
   809  /** Range Checking Routines **/
   810  
// Addresses of the float32 range bounds, taken as immediates so emitted code
// can load the bounds through a pointer (UCOMISS has no 32-bit float
// immediate form).
var (
	_V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
	_V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

// Backing storage for the float32 range bounds; values are filled in init.
var (
	_Vp_max_f32 = new(float32)
	_Vp_min_f32 = new(float32)
)
   820  
// init fills the float32 range-check bounds: ±math.MaxFloat32.
func init() {
	*_Vp_max_f32 = math.MaxFloat32
	*_Vp_min_f32 = -math.MaxFloat32
}
   825  
// range_single emits a float32 range check: st.Dv (a double) is converted to
// single precision and compared against ±MaxFloat32, branching to the
// range-error handler on overflow. ET/EP are preloaded with the float32
// itab/type for the error handler.
func (self *_Assembler) range_single() {
	self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)        // CVTSD2SS st.Dv, X0
	self.Emit("MOVQ", _V_max_f32, _AX)            // MOVQ     _max_f32, AX
	self.Emit("MOVQ", jit.Gitab(_I_float32), _ET) // MOVQ     ${itab(float32)}, ET
	self.Emit("MOVQ", jit.Gtype(_T_float32), _EP) // MOVQ     ${type(float32)}, EP
	self.Emit("UCOMISS", jit.Ptr(_AX, 0), _X0)    // UCOMISS  (AX), X0
	self.Sjmp("JA", _LB_range_error)              // JA       _range_error
	self.Emit("MOVQ", _V_min_f32, _AX)            // MOVQ     _min_f32, AX
	self.Emit("UCOMISS", jit.Ptr(_AX, 0), _X0)    // UCOMISS  (AX), X0
	self.Sjmp("JB", _LB_range_error)              // JB       _range_error
}
   837  
// range_signed emits a signed range check on st.Iv (loaded into AX): values
// outside [a, b] branch to the range-error handler, with the given
// itab/type preloaded into ET/EP for the handler.
func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
	self.Emit("MOVQ", _VAR_st_Iv, _AX)   // MOVQ st.Iv, AX
	self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET
	self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP
	self.Emit("CMPQ", _AX, jit.Imm(a))   // CMPQ AX, ${a}
	self.Sjmp("JL", _LB_range_error)     // JL   _range_error
	self.Emit("CMPQ", _AX, jit.Imm(b))   // CMPQ AX, ${b}
	self.Sjmp("JG", _LB_range_error)     // JG   _range_error
}
   847  
// range_unsigned emits an unsigned range check on st.Iv (loaded into AX):
// negative values or values above v branch to the range-error handler, with
// the given itab/type preloaded into ET/EP for the handler.
func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
	self.Emit("MOVQ", _VAR_st_Iv, _AX)        // MOVQ  st.Iv, AX
	self.Emit("MOVQ", jit.Gitab(i), _ET)      // MOVQ  ${i}, ET
	self.Emit("MOVQ", jit.Gtype(t), _EP)      // MOVQ  ${t}, EP
	self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
	self.Sjmp("JS", _LB_range_error)          // JS    _range_error
	self.Emit("CMPQ", _AX, jit.Imm(int64(v))) // CMPQ  AX, ${v}
	self.Sjmp("JA", _LB_range_error)          // JA    _range_error
}
   857  
   858  /** String Manipulating Routines **/
   859  
// Entry address of the native unquote subroutine, wrapped as an immediate.
var (
	_F_unquote = jit.Imm(int64(native.S_unquote))
)
   863  
// slice_from loads the saved start offset p into SI and delegates to
// slice_from_r to materialize the [start, IC+d) slice in DI/SI.
func (self *_Assembler) slice_from(p obj.Addr, d int64) {
	self.Emit("MOVQ", p, _SI) // MOVQ    ${p}, SI
	self.slice_from_r(_SI, d) // SLICE_R SI, ${d}
}
   868  
// slice_from_r computes a slice of the input from offset ${p} to the cursor:
// DI = IP + p (start pointer) and SI = IC - p + d (length, with adjustment d).
func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
	self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI
	self.Emit("NEGQ", p)                          // NEGQ ${p}
	self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI
}
   874  
// unquote_once emits code storing the just-scanned string into (p, n). The
// fast path references the input buffer directly; if the scanner recorded an
// escape position (st.Ep != -1) the string is routed through the
// escape_string subroutine, and if the copy-string flag is set it is routed
// through copy_string so it does not alias the input. stack selects a plain
// store vs. a write-barriered store for heap destinations.
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
	self.slice_from(_VAR_st_Iv, -1)            // SLICE  st.Iv, $-1
	self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1)) // CMPQ   st.Ep, $-1
	self.Sjmp("JE", "_noescape_{n}")           // JE     _noescape_{n}
	self.Byte(0x4c, 0x8d, 0x0d)                // LEAQ (PC), R9
	self.Sref("_unquote_once_write_{n}", 4)
	self.Sjmp("JMP", "_escape_string")
	self.Link("_noescape_{n}") // _noescape_{n}:
	if copy {
		self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
		self.Sjmp("JNC", "_unquote_once_write_{n}")
		self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
		self.Sref("_unquote_once_write_{n}", 4)
		self.Sjmp("JMP", "_copy_string")
	}
	self.Link("_unquote_once_write_{n}")
	self.Emit("MOVQ", _SI, n) // MOVQ   SI, ${n}
	if stack {
		self.Emit("MOVQ", _DI, p)
	} else {
		self.WriteRecNotAX(10, _DI, p, false, false)
	}
}
   898  
// unquote_twice is the doubly-quoted variant of unquote_once: it first
// verifies the trailing `\"` sequence (bytes at IC-3/IC-2), slices off the
// outer quoting, and routes through escape_string_twice when an escape was
// recorded inside the inner string, or through copy_string when the
// copy-string flag is set. stack selects a plain store vs. a
// write-barriered store for heap destinations.
func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
	self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))                 // CMPQ   st.Ep, $-1
	self.Sjmp("JE", _LB_eof_error)                             // JE     _eof_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB   -3(IP)(IC), $'\\'
	self.Sjmp("JNE", _LB_char_m3_error)                        // JNE    _char_m3_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))  // CMPB   -2(IP)(IC), $'"'
	self.Sjmp("JNE", _LB_char_m2_error)                        // JNE    _char_m2_error
	self.slice_from(_VAR_st_Iv, -3)                            // SLICE  st.Iv, $-3
	self.Emit("MOVQ", _SI, _AX)                                // MOVQ   SI, AX
	self.Emit("ADDQ", _VAR_st_Iv, _AX)                         // ADDQ   st.Iv, AX
	self.Emit("CMPQ", _VAR_st_Ep, _AX)                         // CMPQ   st.Ep, AX
	self.Sjmp("JE", "_noescape_{n}")                           // JE     _noescape_{n}
	self.Byte(0x4c, 0x8d, 0x0d)                                // LEAQ (PC), R9
	self.Sref("_unquote_twice_write_{n}", 4)
	self.Sjmp("JMP", "_escape_string_twice")
	self.Link("_noescape_{n}") // _noescape_{n}:
	self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
	self.Sjmp("JNC", "_unquote_twice_write_{n}")
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_unquote_twice_write_{n}", 4)
	self.Sjmp("JMP", "_copy_string")
	self.Link("_unquote_twice_write_{n}")
	self.Emit("MOVQ", _SI, n) // MOVQ   SI, ${n}
	if stack {
		self.Emit("MOVQ", _DI, p)
	} else {
		self.WriteRecNotAX(12, _DI, p, false, false)
	}
}
   928  
   929  /** Memory Clearing Routines **/
   930  
// Runtime memory-clearing entry points, wrapped as call targets.
var (
	_F_memclrHasPointers    = jit.Func(memclrHasPointers)
	_F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)
   935  
   936  func (self *_Assembler) mem_clear_fn(ptrfree bool) {
   937  	if !ptrfree {
   938  		self.call_go(_F_memclrHasPointers)
   939  	} else {
   940  		self.call_go(_F_memclrNoHeapPointers)
   941  	}
   942  }
   943  
// mem_clear_rem emits code that zeroes memory starting at VP. The byte count
// is ${size} plus the distance from VP to an end pointer loaded through the
// state-stack header ((ST) indexes into ST) — presumably the end of the
// enclosing object being cleared; TODO confirm the exact stack layout.
func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
	self.Emit("MOVQ", jit.Imm(size), _CX)           // MOVQ    ${size}, CX
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ    (ST), AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ    (ST)(AX), AX
	self.Emit("SUBQ", _VP, _AX)                     // SUBQ    VP, AX
	self.Emit("ADDQ", _AX, _CX)                     // ADDQ    AX, CX
	self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0))         // MOVQ    VP, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))         // MOVQ    CX, 8(SP)
	self.mem_clear_fn(ptrfree)                      // CALL_GO memclr{Has,NoHeap}Pointers
}
   954  
   955  /** Map Assigning Routines **/
   956  
// Runtime map-assignment entry points, wrapped as call targets.
var (
	_F_mapassign           = jit.Func(mapassign)
	_F_mapassign_fast32    = jit.Func(mapassign_fast32)
	_F_mapassign_faststr   = jit.Func(mapassign_faststr)
	_F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

// Unmarshaler trampolines; resolved in init to avoid an initialization cycle.
var (
	_F_decodeJsonUnmarshaler obj.Addr
	_F_decodeTextUnmarshaler obj.Addr
)
   968  
// init resolves the unmarshaler trampoline addresses.
func init() {
	_F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
	_F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
   973  
// mapaccess_ptr dereferences the element slot returned by a map assignment
// when the map stores its elements indirectly (as pointers).
func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
	if rt.MapType(rt.UnpackType(t)).IndirectElem() {
		self.vfollow(t.Elem())
	}
}
   979  
// mapassign_std emits a generic runtime.mapassign call with the address of
// the key value ${v} passed in AX.
func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
	self.Emit("LEAQ", v, _AX)            // LEAQ      ${v}, AX
	self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign
}
   984  
// mapassign_str_fast emits a runtime.mapassign_faststr call for a
// string-keyed map (key pointer ${p}, key length ${n}); VP is replaced by
// the returned element slot, dereferenced if the map stores it indirectly.
func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
	self.Emit("MOVQ", jit.Type(t), _AX)      // MOVQ    ${t}, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))  // MOVQ    AX, (SP)
	self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))  // MOVQ    VP, 8(SP)
	self.Emit("MOVQ", p, jit.Ptr(_SP, 16))   // MOVQ    ${p}, 16(SP)
	self.Emit("MOVQ", n, jit.Ptr(_SP, 24))   // MOVQ    ${n}, 24(SP)
	self.call_go(_F_mapassign_faststr)       // CALL_GO mapassign_faststr
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ    32(SP), VP
	self.mapaccess_ptr(t)
}
   995  
// mapassign_call emits a map-assignment call with arguments (type, map,
// key-in-AX) laid out on the Go stack; VP is replaced by the returned
// element slot.
func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
	self.Emit("MOVQ", jit.Type(t), _SI)      // MOVQ    ${t}, SI
	self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0))  // MOVQ    SI, (SP)
	self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))  // MOVQ    VP, 8(SP)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ    AX, 16(SP)
	self.call_go(fn)                         // CALL_GO ${fn}
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ    24(SP), VP
}
  1004  
// mapassign_fastx is mapassign_call plus the indirect-element dereference
// needed by the fast-path assignment routines.
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
	self.mapassign_call(t, fn)
	self.mapaccess_ptr(t)
}
  1009  
// mapassign_utext emits code for a map whose key type implements
// encoding.TextUnmarshaler: a fresh key is allocated, decoded from the
// scratch string sv via decodeTextUnmarshaler, then assigned with either
// the generic or the fast 64-bit-pointer assignment routine depending on
// whether the key type itself is a pointer.
func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
	pv := false
	vk := t.Key()
	tk := t.Key()

	/* deref pointer if needed */
	if vk.Kind() == reflect.Ptr {
		pv = true
		vk = vk.Elem()
	}

	/* addressable value with pointer receiver */
	if addressable {
		pv = false
		tk = reflect.PtrTo(tk)
	}

	/* allocate the key, and call the unmarshaler */
	self.valloc(vk, _DI) // VALLOC  ${vk}, DI
	// must spill vk pointer since next call_go may invoke GC
	self.Emit("MOVQ", _DI, _VAR_vk)
	self.Emit("MOVQ", jit.Type(tk), _AX)      // MOVQ    ${tk}, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))   // MOVQ    AX, (SP)
	self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))   // MOVQ    DI, 8(SP)
	self.Emit("MOVOU", _VAR_sv, _X0)          // MOVOU   sv, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU   X0, 16(SP)
	self.call_go(_F_decodeTextUnmarshaler)    // CALL_GO decodeTextUnmarshaler
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)  // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)  // MOVQ    40(SP), EP
	self.Emit("TESTQ", _ET, _ET)              // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)               // JNZ     _error
	self.Emit("MOVQ", _VAR_vk, _AX)

	/* select the correct assignment function */
	if !pv {
		self.mapassign_call(t, _F_mapassign)
	} else {
		self.mapassign_fastx(t, _F_mapassign_fast64ptr)
	}
}
  1050  
  1051  /** External Unmarshaler Routines **/
  1052  
// Entry addresses of the native value-skipping subroutines.
var (
	_F_skip_one    = jit.Imm(int64(native.S_skip_one))
	_F_skip_number = jit.Imm(int64(native.S_skip_number))
)
  1057  
// unmarshal_json emits code that skips one whole JSON value, captures it as
// the slice sv (via slice_from_r), and feeds it to the type's
// json.Unmarshaler implementation.
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
	self.call_sf(_F_skip_one)                               // CALL_SF   skip_one
	self.Emit("TESTQ", _AX, _AX)                            // TESTQ     AX, AX
	self.Sjmp("JS", _LB_parsing_error_v)                    // JS        _parsing_error_v
	self.slice_from_r(_AX, 0)                               // SLICE_R   AX, $0
	self.Emit("MOVQ", _DI, _VAR_sv_p)                       // MOVQ      DI, sv.p
	self.Emit("MOVQ", _SI, _VAR_sv_n)                       // MOVQ      SI, sv.n
	self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
}
  1067  
// unmarshal_text emits code that parses and unquotes a JSON string into sv,
// then feeds it to the type's encoding.TextUnmarshaler implementation.
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
	self.parse_string()                                     // PARSE     STRING
	self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
	self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
}
  1073  
// unmarshal_func emits the common call sequence for both unmarshaler kinds:
// optionally allocate-and-follow a nil pointer field (deref on pointer
// types), then invoke fn(type, value-pointer, sv) and branch to the error
// handler if it returned a non-nil error in ET/EP.
func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
	pt := t
	vk := t.Kind()

	/* allocate the field if needed */
	if deref && vk == reflect.Ptr {
		self.Emit("MOVQ", _VP, _AX)                // MOVQ   VP, AX
		self.Emit("MOVQ", jit.Ptr(_AX, 0), _AX)    // MOVQ   (AX), AX
		self.Emit("TESTQ", _AX, _AX)               // TESTQ  AX, AX
		self.Sjmp("JNZ", "_deref_{n}")             // JNZ    _deref_{n}
		self.valloc(t.Elem(), _AX)                 // VALLOC ${t.Elem()}, AX
		self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ   AX, (VP)
		self.Link("_deref_{n}")                    // _deref_{n}:
	}

	/* set value type */
	self.Emit("MOVQ", jit.Type(pt), _CX)    // MOVQ ${pt}, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP)

	/* set value pointer */
	if deref && vk == reflect.Ptr {
		self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
	} else {
		self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
	}

	/* set the source string and call the unmarshaler */
	self.Emit("MOVOU", _VAR_sv, _X0)          // MOVOU   sv, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU   X0, 16(SP)
	self.call_go(fn)                          // CALL_GO ${fn}
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)  // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)  // MOVQ    40(SP), EP
	self.Emit("TESTQ", _ET, _ET)              // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)               // JNZ     _error
}
  1109  
  1110  /** Dynamic Decoding Routine **/
  1111  
// decodeTypedPointer trampoline; resolved in init to avoid an
// initialization cycle.
var (
	_F_decodeTypedPointer obj.Addr
)
  1115  
// init resolves the decodeTypedPointer trampoline address.
func init() {
	_F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}
  1119  
// decode_dynamic emits a re-entrant call into decodeTypedPointer for a value
// whose type (${vt}) and destination (${vp}) are only known at run time.
// The cursor IC is updated from the callee's result. A
// json.MismatchTypeError is not raised immediately: it is deferred by
// stashing the error into VAR_et/VAR_ic; any other non-nil error jumps to
// the error handler.
func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
	self.Emit("MOVQ", _ARG_fv, _CX)                   // MOVQ    fv, CX
	self.Emit("MOVOU", _ARG_sp, _X0)                  // MOVOU   sp, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))          // MOVOU   X0, (SP)
	self.Emit("MOVQ", _IC, jit.Ptr(_SP, 16))          // MOVQ    IC, 16(SP)
	self.Emit("MOVQ", vt, jit.Ptr(_SP, 24))           // MOVQ    ${vt}, 24(SP)
	self.Emit("MOVQ", vp, jit.Ptr(_SP, 32))           // MOVQ    ${vp}, 32(SP)
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 40))          // MOVQ    ST, 40(SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 48))          // MOVQ    CX, 48(SP)
	self.call_go(_F_decodeTypedPointer)               // CALL_GO decodeTypedPointer
	self.Emit("MOVQ", jit.Ptr(_SP, 64), _ET)          // MOVQ    64(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 72), _EP)          // MOVQ    72(SP), EP
	self.Emit("MOVQ", jit.Ptr(_SP, 56), _IC)          // MOVQ    56(SP), IC
	self.Emit("TESTQ", _ET, _ET)                      // TESTQ   ET, ET
	self.Sjmp("JE", "_decode_dynamic_end_{n}")        // JE      _decode_dynamic_end_{n}
	self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ    _I_json_MismatchTypeError, AX
	self.Emit("CMPQ", _ET, _AX)                       // CMPQ    ET, AX
	self.Sjmp("JNE", _LB_error)                       // JNE     _error
	self.Emit("MOVQ", _EP, _VAR_ic)                   // MOVQ    EP, VAR_ic
	self.Emit("MOVQ", _ET, _VAR_et)                   // MOVQ    ET, VAR_et
	self.Link("_decode_dynamic_end_{n}")

}
  1143  
  1144  /** OpCode Assembler Functions **/
  1145  
// Runtime helpers used by the opcode assemblers, wrapped as call targets.
var (
	_F_memequal         = jit.Func(memequal)
	_F_memmove          = jit.Func(memmove)
	_F_growslice        = jit.Func(growslice)
	_F_makeslice        = jit.Func(makeslice)
	_F_makemap_small    = jit.Func(makemap_small)
	_F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

// Native whitespace-skip and string-hash subroutine addresses.
var (
	_F_lspace  = jit.Imm(int64(native.S_lspace))
	_F_strhash = jit.Imm(int64(caching.S_strhash))
)

// Assembly subroutine addresses for base64 decoding and generic value
// decoding.
var (
	_F_b64decode   = jit.Imm(int64(_subr__b64decode))
	_F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

// Native whole-value skipping subroutine addresses for arrays and objects.
var (
	_F_skip_array  = jit.Imm(int64(native.S_skip_array))
	_F_skip_object = jit.Imm(int64(native.S_skip_object))
)

var (
	_F_FieldMap_GetCaseInsensitive obj.Addr
	// _Zero_Base is the address of an empty slice's backing store, used as a
	// non-nil zero-length base pointer.
	_Empty_Slice = make([]byte, 0)
	_Zero_Base   = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
	_MODE_AVX2 = 1 << 2
)

// Field offsets into caching.FieldEntry, for emitted field lookups.
const (
	_Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
	_Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
	_Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

// reflect.Ptr kind value and the KindFlags offset in rt.GoType, for emitted
// kind checks.
const (
	_Vk_Ptr       = int64(reflect.Ptr)
	_Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)
  1190  
// init resolves the case-insensitive field lookup method address.
func init() {
	_F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
  1194  
// _asm_OP_any decodes into an interface{} destination. If the interface
// already holds a non-nil pointer (that is not VP itself) to a
// pointer-kinded type, it decodes into that pointee dynamically; otherwise
// it falls back to the generic decode_value subroutine.
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 8), _CX)                // MOVQ    8(VP), CX
	self.Emit("TESTQ", _CX, _CX)                           // TESTQ   CX, CX
	self.Sjmp("JZ", "_decode_{n}")                         // JZ      _decode_{n}
	self.Emit("CMPQ", _CX, _VP)                            // CMPQ    CX, VP
	self.Sjmp("JE", "_decode_{n}")                         // JE      _decode_{n}
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)                // MOVQ    (VP), AX
	self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
	self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)        // ANDL    ${F_kind_mask}, DX
	self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))               // CMPL    DX, ${reflect.Ptr}
	self.Sjmp("JNE", "_decode_{n}")                        // JNE     _decode_{n}
	self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                // LEAQ    8(VP), DI
	self.decode_dynamic(_AX, _DI)                          // DECODE  AX, DI
	self.Sjmp("JMP", "_decode_end_{n}")                    // JMP     _decode_end_{n}
	self.Link("_decode_{n}")                               // _decode_{n}:
	self.Emit("MOVQ", _ARG_fv, _DF)                        // MOVQ    fv, DF
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 0))                // MOVQ    _ST, (SP)
	self.call(_F_decodeValue)                              // CALL    decodeValue
	self.Emit("TESTQ", _EP, _EP)                           // TESTQ   EP, EP
	self.Sjmp("JNZ", _LB_parsing_error)                    // JNZ     _parsing_error
	self.Link("_decode_end_{n}")                           // _decode_end_{n}:
}
  1217  
// _asm_OP_dyn decodes into a non-empty interface destination: the held
// value must be a non-nil pointer-kinded type (otherwise a type error is
// raised), and the decode proceeds dynamically into the pointee.
func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
	self.Emit("MOVQ", jit.Type(p.vt()), _ET)               // MOVQ    ${p.vt()}, ET
	self.Emit("CMPQ", jit.Ptr(_VP, 8), jit.Imm(0))         // CMPQ    8(VP), $0
	self.Sjmp("JE", _LB_type_error)                        // JE      _type_error
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)                // MOVQ    (VP), AX
	self.Emit("MOVQ", jit.Ptr(_AX, 8), _AX)                // MOVQ    8(AX), AX
	self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
	self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)        // ANDL    ${F_kind_mask}, DX
	self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))               // CMPL    DX, ${reflect.Ptr}
	self.Sjmp("JNE", _LB_type_error)                       // JNE     _type_error
	self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                // LEAQ    8(VP), DI
	self.decode_dynamic(_AX, _DI)                          // DECODE  AX, DI
	self.Link("_decode_end_{n}")                           // _decode_end_{n}:
}
  1232  
// _asm_OP_str decodes a JSON string into the Go string at (VP) / 8(VP).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
	self.parse_string()                                              // PARSE   STRING
	self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP)
}
  1237  
// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at VP:
// the raw quoted bytes are sliced out, a buffer of 3*(n/4) bytes (the
// maximum decoded size) is allocated as the slice base, and the native
// b64decode routine fills it, storing the decoded length into 8(VP).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
	self.parse_string()                             // PARSE  STRING
	self.slice_from(_VAR_st_Iv, -1)                 // SLICE  st.Iv, $-1
	self.Emit("MOVQ", _DI, jit.Ptr(_VP, 0))         // MOVQ   DI, (VP)
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))         // MOVQ   SI, 8(VP)
	self.Emit("SHRQ", jit.Imm(2), _SI)              // SHRQ   $2, SI
	self.Emit("LEAQ", jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ   (SI)(SI*2), SI
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 16))        // MOVQ   SI, 16(VP)
	self.malloc(_SI, _SI)                           // MALLOC SI, SI

	// TODO: due to base64x's bug, only use AVX mode now
	self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) //  MOVL $_MODE_JSON, CX

	/* call the decoder */
	self.Emit("XORL", _DX, _DX) // XORL  DX, DX
	self.Emit("MOVQ", _VP, _DI) // MOVQ  VP, DI

	// swap the new buffer into (VP) with a write barrier, keeping the old
	// source pointer in SI for the decoder
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _R9)                  // MOVQ SI, (VP)
	self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP)
	self.Emit("MOVQ", _R9, _SI)

	self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP)
	self.call(_F_b64decode)                  // CALL  b64decode
	self.Emit("TESTQ", _AX, _AX)             // TESTQ AX, AX
	self.Sjmp("JS", _LB_base64_error)        // JS    _base64_error
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))  // MOVQ  AX, 8(VP)
}
  1265  
// _asm_OP_bool decodes the literals "true"/"false" into the bool at (VP) by
// comparing the next 4-5 input bytes as packed 32-bit words. Any other token
// records a deferred bool type-mismatch and skips the value.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                  // LEAQ 4(IC), AX
	self.Emit("CMPQ", _AX, _IL)                              // CMPQ AX, IL
	self.Sjmp("JA", _LB_eof_error)                           // JA   _eof_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f'
	self.Sjmp("JE", "_false_{n}")                            // JE   _false_{n}
	self.Emit("MOVL", jit.Imm(_IM_true), _CX)                // MOVL $"true", CX
	self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))          // CMPL CX, (IP)(IC)
	self.Sjmp("JE", "_bool_true_{n}")

	// try to skip the value
	self.Emit("MOVQ", _IC, _VAR_ic)
	self.Emit("MOVQ", _T_bool, _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_end_{n}", 4)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one)

	self.Link("_bool_true_{n}")
	self.Emit("MOVQ", _AX, _IC)                     // MOVQ AX, IC
	self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))  // MOVB $1, (VP)
	self.Sjmp("JMP", "_end_{n}")                    // JMP  _end_{n}
	self.Link("_false_{n}")                         // _false_{n}:
	self.Emit("ADDQ", jit.Imm(1), _AX)              // ADDQ $1, AX
	self.Emit("ADDQ", jit.Imm(1), _IC)              // ADDQ $1, IC
	self.Emit("CMPQ", _AX, _IL)                     // CMPQ AX, IL
	self.Sjmp("JA", _LB_eof_error)                  // JA   _eof_error
	self.Emit("MOVL", jit.Imm(_IM_alse), _CX)       // MOVL $"alse", CX
	self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
	self.Sjmp("JNE", _LB_im_error)                  // JNE  _im_error
	self.Emit("MOVQ", _AX, _IC)                     // MOVQ AX, IC
	self.Emit("XORL", _AX, _AX)                     // XORL AX, AX
	self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))         // MOVB AX, (VP)
	self.Link("_end_{n}")                           // _end_{n}:
}
  1302  
// _asm_OP_num decodes a json.Number into (VP) / 8(VP): the numeric token is
// skipped (not parsed) and stored as a raw string slice, honoring the
// copy-string flag. A leading '"' sets the fl flag and requires a matching
// closing quote after the number. On a malformed number a deferred
// type-mismatch is recorded and the value is skipped.
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
	self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
	self.Emit("MOVQ", _IC, _BP)
	self.Sjmp("JNE", "_skip_number_{n}")
	self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
	self.Emit("ADDQ", jit.Imm(1), _IC)
	self.Link("_skip_number_{n}")

	/* call skip_number */
	self.call_sf(_F_skip_number) // CALL_SF skip_number
	self.Emit("TESTQ", _AX, _AX) // TESTQ   AX, AX
	self.Sjmp("JNS", "_num_next_{n}")

	/* call skip one */
	self.Emit("MOVQ", _BP, _VAR_ic)
	self.Emit("MOVQ", _T_number, _ET)
	self.Emit("MOVQ", _ET, _VAR_et)
	self.Byte(0x4c, 0x8d, 0x0d)
	self.Sref("_num_end_{n}", 4)
	self.Emit("MOVQ", _R9, _VAR_pc)
	self.Sjmp("JMP", _LB_skip_one)

	/* assign string */
	self.Link("_num_next_{n}")
	self.slice_from_r(_AX, 0)
	self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
	self.Sjmp("JNC", "_num_write_{n}")
	self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
	self.Sref("_num_write_{n}", 4)
	self.Sjmp("JMP", "_copy_string")
	self.Link("_num_write_{n}")
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ  SI, 8(VP)
	self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)

	/* check if quoted */
	self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
	self.Sjmp("JNE", "_num_end_{n}")
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
	self.Sjmp("JNE", _LB_char_0_error)
	self.Emit("ADDQ", jit.Imm(1), _IC)
	self.Link("_num_end_{n}")
}
  1346  
// _asm_OP_i8 emits code that parses a signed integer, range-checks it against
// [math.MinInt8, math.MaxInt8], and stores the low byte at (VP).
func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
	var pin = "_i8_end_{n}"
	self.parse_signed(int8Type, pin, -1)                            // PARSE int8
	self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
	self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                         // MOVB  AX, (VP)
	self.Link(pin)
}
  1354  
// _asm_OP_i16 emits code that parses a signed integer, range-checks it against
// [math.MinInt16, math.MaxInt16], and stores the low word at (VP).
func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
	var pin = "_i16_end_{n}"
	self.parse_signed(int16Type, pin, -1)                               // PARSE int16
	self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
	self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                             // MOVW  AX, (VP)
	self.Link(pin)
}
  1362  
// _asm_OP_i32 emits code that parses a signed integer, range-checks it against
// [math.MinInt32, math.MaxInt32], and stores the low doubleword at (VP).
func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
	var pin = "_i32_end_{n}"
	self.parse_signed(int32Type, pin, -1)                               // PARSE int32
	self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
	self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                             // MOVL  AX, (VP)
	self.Link(pin)
}
  1370  
// _asm_OP_i64 emits code that parses a signed integer and stores the full
// 64-bit value at (VP); no range check is needed for int64.
func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
	var pin = "_i64_end_{n}"
	self.parse_signed(int64Type, pin, -1)   // PARSE int64
	self.Emit("MOVQ", _VAR_st_Iv, _AX)      // MOVQ  st.Iv, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ  AX, (VP)
	self.Link(pin)
}
  1378  
// _asm_OP_u8 emits code that parses an unsigned integer, range-checks it
// against math.MaxUint8, and stores the low byte at (VP).
func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
	var pin = "_u8_end_{n}"
	self.parse_unsigned(uint8Type, pin, -1)                // PARSE uint8
	self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
	self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                // MOVB  AX, (VP)
	self.Link(pin)
}
  1386  
// _asm_OP_u16 emits code that parses an unsigned integer, range-checks it
// against math.MaxUint16, and stores the low word at (VP).
func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
	var pin = "_u16_end_{n}"
	self.parse_unsigned(uint16Type, pin, -1)                  // PARSE uint16
	self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
	self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                   // MOVW  AX, (VP)
	self.Link(pin)
}
  1394  
// _asm_OP_u32 emits code that parses an unsigned integer, range-checks it
// against math.MaxUint32, and stores the low doubleword at (VP).
func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
	var pin = "_u32_end_{n}"
	self.parse_unsigned(uint32Type, pin, -1)                  // PARSE uint32
	self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
	self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                   // MOVL  AX, (VP)
	self.Link(pin)
}
  1402  
// _asm_OP_u64 emits code that parses an unsigned integer and stores the full
// 64-bit value at (VP); no range check is needed for uint64.
func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
	var pin = "_u64_end_{n}"
	self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64
	self.Emit("MOVQ", _VAR_st_Iv, _AX)       // MOVQ  st.Iv, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))  // MOVQ  AX, (VP)
	self.Link(pin)
}
  1410  
// _asm_OP_f32 emits code that parses a JSON number, range-checks it for
// single precision, and stores the float32 result at (VP).
func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
	var pin = "_f32_end_{n}"
	self.parse_number(float32Type, pin, -1)  // PARSE NUMBER
	self.range_single()                      // RANGE float32
	self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP)
	self.Link(pin)
}
  1418  
// _asm_OP_f64 emits code that parses a JSON number and stores the float64
// result at (VP); no range check is needed for double precision.
func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
	var pin = "_f64_end_{n}"
	self.parse_number(float64Type, pin, -1)  // PARSE NUMBER
	self.Emit("MOVSD", _VAR_st_Dv, _X0)      // MOVSD st.Dv, X0
	self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP)
	self.Link(pin)
}
  1426  
// _asm_OP_unquote emits code that decodes a doubly-quoted string: it requires
// the input to start with the two characters `\"`, skips them, parses the
// inner string, and unquotes it twice into the string header at (VP)/8(VP).
func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
	self.check_eof(2)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB    (IP)(IC), $'\\'
	self.Sjmp("JNE", _LB_char_0_error)                          // JNE     _char_0_error
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB    1(IP)(IC), $'"'
	self.Sjmp("JNE", _LB_char_1_error)                          // JNE     _char_1_error
	self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ    $2, IC
	self.parse_string()                                         // PARSE   STRING
	self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
}
  1437  
// _asm_OP_nil_1 emits code that zeroes one machine word at (VP).
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
	self.Emit("XORL", _AX, _AX)             // XORL AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
}
  1442  
// _asm_OP_nil_2 emits code that zeroes two machine words (16 bytes) at (VP).
func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
	self.Emit("PXOR", _X0, _X0)              // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
}
  1447  
// _asm_OP_nil_3 emits code that zeroes three machine words (24 bytes) at (VP):
// 16 bytes via an SSE store plus one scalar store at offset 16.
func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
	self.Emit("XORL", _AX, _AX)              // XORL  AX, AX
	self.Emit("PXOR", _X0, _X0)              // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 16)) // MOVQ  AX, 16(VP)
}
  1454  
// _asm_OP_deref emits code that follows (and allocates if needed) the pointer
// at (VP) for the instruction's value type.
func (self *_Assembler) _asm_OP_deref(p *_Instr) {
	self.vfollow(p.vt())
}
  1458  
// _asm_OP_index emits code that advances the value pointer VP by the constant
// byte offset carried in the instruction.
func (self *_Assembler) _asm_OP_index(p *_Instr) {
	self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX
	self.Emit("ADDQ", _AX, _VP)              // ADDQ _AX, _VP
}
  1463  
// _asm_OP_is_null emits code that tests for the literal "null" at the cursor;
// if present it consumes the four bytes and branches to the instruction's
// target, otherwise it falls through without moving the cursor.
func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                       // LEAQ    4(IC), AX
	self.Emit("CMPQ", _AX, _IL)                                   // CMPQ    AX, IL
	self.Sjmp("JA", "_not_null_{n}")                              // JA      _not_null_{n}
	self.Emit("CMPL", jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL    (IP)(IC), $"null"
	self.Emit("CMOVQEQ", _AX, _IC)                                // CMOVQEQ AX, IC
	self.Xjmp("JE", p.vi())                                       // JE      {p.vi()}
	self.Link("_not_null_{n}")                                    // _not_null_{n}:
}
  1473  
// _asm_OP_is_null_quote emits code that tests for the 5-byte sequence `null"`
// at the cursor (a null inside a quoted value); if present it consumes all
// five bytes and branches to the instruction's target.
func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
	self.Emit("LEAQ", jit.Ptr(_IC, 5), _AX)                       // LEAQ    5(IC), AX
	self.Emit("CMPQ", _AX, _IL)                                   // CMPQ    AX, IL
	self.Sjmp("JA", "_not_null_quote_{n}")                        // JA      _not_null_quote_{n}
	self.Emit("CMPL", jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL    (IP)(IC), $"null"
	self.Sjmp("JNE", "_not_null_quote_{n}")                       // JNE     _not_null_quote_{n}
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))      // CMPB    4(IP)(IC), $'"'
	self.Emit("CMOVQEQ", _AX, _IC)                                // CMOVQEQ AX, IC
	self.Xjmp("JE", p.vi())                                       // JE      {p.vi()}
	self.Link("_not_null_quote_{n}")                              // _not_null_quote_{n}:
}
  1485  
// _asm_OP_map_init emits code that ensures the map at (VP) exists: if the map
// header is nil it calls runtime.makemap_small and stores the new map with a
// write barrier. Afterwards VP is set to the map pointer itself (in AX on
// both paths) so subsequent map ops operate on the map, not its slot.
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
	self.Emit("TESTQ", _AX, _AX)               // TESTQ   AX, AX
	self.Sjmp("JNZ", "_end_{n}")               // JNZ     _end_{n}
	self.call_go(_F_makemap_small)             // CALL_GO makemap_small
	self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX)    // MOVQ    (SP), AX
	self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ    AX, (VP)
	self.Link("_end_{n}")                      // _end_{n}:
	self.Emit("MOVQ", _AX, _VP)                // MOVQ    AX, VP
}
  1496  
// _asm_OP_map_key_i8 emits code that parses a quoted int8 map key,
// range-checks it, and inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
	self.parse_signed(int8Type, "", p.vi())                         // PARSE     int8
	self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE     int8
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv
}
  1503  
// _asm_OP_map_key_i16 emits code that parses a quoted int16 map key,
// range-checks it, and inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
	self.parse_signed(int16Type, "", p.vi())                            // PARSE     int16
	self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE     int16
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv
}
  1510  
// _asm_OP_map_key_i32 emits code that parses a quoted int32 map key and
// inserts it, using runtime.mapassign_fast32 when the map type qualifies for
// the fast path, or the generic mapassign otherwise.
func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
	self.parse_signed(int32Type, "", p.vi())                            // PARSE     int32
	self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE     int32
	self.match_char('"')
	if vt := p.vt(); !mapfast(vt) {
		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv
	} else {
		self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32
	}
}
  1521  
// _asm_OP_map_key_i64 emits code that parses a quoted int64 map key and
// inserts it, using runtime.mapassign_fast64 when the map type qualifies for
// the fast path (key pre-loaded into AX), or the generic mapassign otherwise.
func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
	self.parse_signed(int64Type, "", p.vi()) // PARSE     int64
	self.match_char('"')
	if vt := p.vt(); !mapfast(vt) {
		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv
	} else {
		self.Emit("MOVQ", _VAR_st_Iv, _AX)            // MOVQ      st.Iv, AX
		self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64
	}
}
  1532  
// _asm_OP_map_key_u8 emits code that parses a quoted uint8 map key,
// range-checks it, and inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
	self.parse_unsigned(uint8Type, "", p.vi())             // PARSE     uint8
	self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE     uint8
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint8, vt.Iv
}
  1539  
// _asm_OP_map_key_u16 emits code that parses a quoted uint16 map key,
// range-checks it, and inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
	self.parse_unsigned(uint16Type, "", p.vi())               // PARSE     uint16
	self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE     uint16
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint16, vt.Iv
}
  1546  
// _asm_OP_map_key_u32 emits code that parses a quoted uint32 map key and
// inserts it, using runtime.mapassign_fast32 when the map type qualifies for
// the fast path, or the generic mapassign otherwise.
func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
	self.parse_unsigned(uint32Type, "", p.vi())               // PARSE     uint32
	self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE     uint32
	self.match_char('"')
	if vt := p.vt(); !mapfast(vt) {
		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint32, vt.Iv
	} else {
		self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32
	}
}
  1557  
// _asm_OP_map_key_u64 emits code that parses a quoted uint64 map key and
// inserts it, using runtime.mapassign_fast64 when the map type qualifies for
// the fast path (key pre-loaded into AX), or the generic mapassign otherwise.
func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
	self.parse_unsigned(uint64Type, "", p.vi()) // PARSE     uint64
	self.match_char('"')
	if vt := p.vt(); !mapfast(vt) {
		self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint64, vt.Iv
	} else {
		self.Emit("MOVQ", _VAR_st_Iv, _AX)            // MOVQ      st.Iv, AX
		self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64
	}
}
  1568  
// _asm_OP_map_key_f32 emits code that parses a quoted float32 map key,
// range-checks it for single precision, writes the narrowed value back to
// st.Dv, and inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
	self.parse_number(float32Type, "", p.vi()) // PARSE     NUMBER
	self.range_single()                        // RANGE     float32
	self.Emit("MOVSS", _X0, _VAR_st_Dv)        // MOVSS     X0, st.Dv
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
  1576  
// _asm_OP_map_key_f64 emits code that parses a quoted float64 map key and
// inserts it via the generic runtime mapassign.
func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
	self.parse_number(float64Type, "", p.vi()) // PARSE     NUMBER
	self.match_char('"')
	self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}
  1582  
// _asm_OP_map_key_str emits code that parses and unquotes a string map key
// into sv.p/sv.n, then inserts it: the fast path passes the string header
// directly to mapassign_faststr; the slow path copies the header into a
// freshly allocated key value and uses the generic mapassign.
func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
	self.parse_string()                                 // PARSE     STRING
	self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
	if vt := p.vt(); !mapfast(vt) {
		self.valloc(vt.Key(), _DI)
		self.Emit("MOVOU", _VAR_sv, _X0)
		self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
		self.mapassign_std(vt, jit.Ptr(_DI, 0))
	} else {
		self.Emit("MOVQ", _VAR_sv_p, _DI)     // MOVQ      sv.p, DI
		self.Emit("MOVQ", _VAR_sv_n, _SI)     // MOVQ      sv.n, SI
		self.mapassign_str_fast(vt, _DI, _SI) // MAPASSIGN string, DI, SI
	}
}
  1597  
// _asm_OP_map_key_utext emits code that parses and unquotes a string map key,
// then inserts it through the key type's encoding.TextUnmarshaler
// implementation (value receiver form).
func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
	self.parse_string()                                 // PARSE     STRING
	self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE   once, sv.p, sv.n
	self.mapassign_utext(p.vt(), false)                 // MAPASSIGN utext, ${p.vt()}, false
}
  1603  
// _asm_OP_map_key_utext_p emits code that parses and unquotes a string map
// key, then inserts it through the key type's encoding.TextUnmarshaler
// implementation (pointer receiver form).
func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
	self.parse_string()                                  // PARSE     STRING
	self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE   once, sv.p, sv.n
	self.mapassign_utext(p.vt(), true)                   // MAPASSIGN utext, ${p.vt()}, true
}
  1609  
// _asm_OP_array_skip emits code that skips an entire JSON array with the
// native skip_array routine, raising a parsing error on failure.
func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
	self.call_sf(_F_skip_array)          // CALL_SF skip_array
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parsing_error_v
}
  1615  
// _asm_OP_array_clear emits code that zeroes the remaining (undecoded) tail
// of a fixed-size array, including pointer slots (write barrier variant).
func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
	self.mem_clear_rem(p.i64(), true)
}
  1619  
// _asm_OP_array_clear_p emits code that zeroes the remaining (undecoded) tail
// of a fixed-size array for pointer-free element types.
func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
	self.mem_clear_rem(p.i64(), false)
}
  1623  
// _asm_OP_slice_init emits code that prepares the slice header at (VP) for
// decoding: the length is reset to zero, and if the capacity is zero a new
// backing array of _MinSlice elements is allocated with runtime.makeslice and
// stored with a write barrier.
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
	self.Emit("XORL", _AX, _AX)                // XORL    AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP)        -- len = 0
	self.Emit("MOVQ", jit.Ptr(_VP, 16), _AX)   // MOVQ    16(VP), AX       -- AX = cap
	self.Emit("TESTQ", _AX, _AX)               // TESTQ   AX, AX
	self.Sjmp("JNZ", "_done_{n}")              // JNZ     _done_{n}        -- keep existing backing array
	self.Emit("MOVQ", jit.Imm(_MinSlice), _CX) // MOVQ    ${_MinSlice}, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_VP, 16))   // MOVQ    CX, 16(VP)       -- cap = _MinSlice
	self.Emit("MOVQ", jit.Type(p.vt()), _DX)   // MOVQ    ${p.vt()}, DX
	self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0))    // MOVQ    DX, (SP)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))    // MOVQ    AX, 8(SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 16))   // MOVQ    CX, 16(SP)
	self.call_go(_F_makeslice)                 // CALL_GO makeslice
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _AX)   // MOVQ    24(SP), AX
	self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ    AX, (VP)
	self.Link("_done_{n}")                     // _done_{n}:
	self.Emit("XORL", _AX, _AX)                // XORL    AX, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))    // MOVQ    AX, 8(VP)
}
  1643  
// _asm_OP_check_empty emits code that detects an empty array ("[]" with the
// '[' already consumed): if the next byte is ']' it consumes it, stores an
// empty non-nil slice header (_Zero_Base pointer, zero len/cap) at (VP), and
// branches past the element-decoding loop. Only ']' is supported; any other
// bracket is a programming error caught at assembly time.
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
	rbracket := p.vb()
	if rbracket == ']' {
		self.check_eof(1)
		self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
		self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB    (IP)(IC), ']'
		self.Sjmp("JNE", "_not_empty_array_{n}")                             // JNE     _not_empty_array_{n}
		self.Emit("MOVQ", _AX, _IC)                                          // MOVQ    AX, IC
		self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
		self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
		self.Emit("PXOR", _X0, _X0)              // PXOR    X0, X0
		self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU   X0, 8(VP)
		self.Xjmp("JMP", p.vi())                 // JMP     {p.vi()}
		self.Link("_not_empty_array_{n}")
	} else {
		panic("only implement check empty array here!")
	}
}
  1662  
// _asm_OP_slice_append emits code that appends one element slot to the slice
// at (VP): if len == cap it grows the backing array with runtime.growslice
// (doubling the capacity), then bumps the length and points VP at the new
// element (base + len*elemsize). For pointer-free element types it also
// explicitly zeroes the memory growslice left uninitialized between the old
// capacity and the new one, so stale bytes are never decoded as values.
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_VP, 8), _AX)                 // MOVQ    8(VP), AX
	self.Emit("CMPQ", _AX, jit.Ptr(_VP, 16))                // CMPQ    AX, 16(VP)
	self.Sjmp("JB", "_index_{n}")                           // JB      _index_{n}
	self.Emit("MOVQ", jit.Type(p.vt()), _AX)                // MOVQ    ${p.vt()}, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                 // MOVQ    AX, (SP)
	self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0)                // MOVOU   (VP), X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                // MOVOU   X0, 8(SP)
	self.Emit("MOVQ", jit.Ptr(_VP, 16), _AX)                // MOVQ    16(VP), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24))                // MOVQ    AX, 24(SP)
	self.Emit("SHLQ", jit.Imm(1), _AX)                      // SHLQ    $1, AX            -- new cap = old cap * 2
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))                // MOVQ    AX, 32(SP)
	self.call_go(_F_growslice)                              // CALL_GO growslice
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _DI)                // MOVQ    40(SP), DI        -- new base
	self.Emit("MOVQ", jit.Ptr(_SP, 48), _AX)                // MOVQ    48(SP), AX        -- new len
	self.Emit("MOVQ", jit.Ptr(_SP, 56), _SI)                // MOVQ    56(SP), SI        -- new cap
	self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true) // MOVQ    DI, (VP)
	self.Emit("MOVQ", _AX, jit.Ptr(_VP, 8))                 // MOVQ    AX, 8(VP)
	self.Emit("MOVQ", _SI, jit.Ptr(_VP, 16))                // MOVQ    SI, 16(VP)

	// because growslice not zero memory {oldcap, newlen} when append et not has ptrdata.
	// but we should zero it, avoid decode it as random values.
	if rt.UnpackType(p.vt()).PtrData == 0 {
		self.Emit("SUBQ", _AX, _SI) // SI = new cap - len = number of elements to zero

		self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_VP, 8))   // ADDQ    $1, 8(VP)
		self.Emit("MOVQ", _DI, _VP)                      // MOVQ    DI, VP
		self.Emit("MOVQ", jit.Imm(int64(p.vlen())), _CX) // MOVQ    ${p.vlen()}, CX
		self.From("MULQ", _CX)                           // MULQ    CX
		self.Emit("ADDQ", _AX, _VP)                      // ADDQ    AX, VP            -- VP = base + len*elemsize

		self.Emit("MOVQ", _SI, _AX)             // MOVQ    SI, AX
		self.From("MULQ", _CX)                  // MULQ    CX                        -- AX = bytes to zero
		self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ    AX, 8(SP)

		self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0))   // MOVQ    VP, (SP)
		self.mem_clear_fn(true)                   // CALL_GO memclr{Has,NoHeap}
		self.Sjmp("JMP", "_append_slice_end_{n}") // JMP    _append_slice_end_{n}
	}

	self.Link("_index_{n}")                          // _index_{n}:
	self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_VP, 8))   // ADDQ    $1, 8(VP)
	self.Emit("MOVQ", jit.Ptr(_VP, 0), _VP)          // MOVQ    (VP), VP
	self.Emit("MOVQ", jit.Imm(int64(p.vlen())), _CX) // MOVQ    ${p.vlen()}, CX
	self.From("MULQ", _CX)                           // MULQ    CX
	self.Emit("ADDQ", _AX, _VP)                      // ADDQ    AX, VP
	self.Link("_append_slice_end_{n}")
}
  1711  
// _asm_OP_object_skip emits code that skips an entire JSON object with the
// native skip_object routine, raising a parsing error on failure.
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
	self.call_sf(_F_skip_object)         // CALL_SF skip_object
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parsing_error_v
}
  1717  
// _asm_OP_object_next emits code that skips a single JSON value (an ignored
// object member) with the native skip_one routine, raising a parsing error on
// failure.
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
	self.call_sf(_F_skip_one)            // CALL_SF skip_one
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parsing_error_v
}
  1723  
// _asm_OP_struct_field emits code that resolves a struct field name to its
// field index, leaving the result in the `sr` spill slot (-1 if unmatched).
// It parses and unquotes the key into sv, hashes it with runtime strhash,
// then open-address probes the instruction's FieldMap (32-byte entries:
// hash, name, ID) comparing hash first and the name bytes via memequal.
// Registers live across the Go calls are spilled to the ss.* slots. On a
// miss it falls back to FieldMap.GetCaseInsensitive; if that also misses and
// the DisallowUnknownFields flag is set, it raises _LB_field_error.
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
	assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
	self.Emit("MOVQ", jit.Imm(-1), _AX)                      // MOVQ    $-1, AX
	self.Emit("MOVQ", _AX, _VAR_sr)                          // MOVQ    AX, sr          -- sr = -1: no match yet
	self.parse_string()                                      // PARSE   STRING
	self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)     // UNQUOTE once, sv.p, sv.n
	self.Emit("LEAQ", _VAR_sv, _AX)                          // LEAQ    sv, AX
	self.Emit("XORL", _CX, _CX)                              // XORL    CX, CX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                  // MOVQ    AX, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))                  // MOVQ    CX, 8(SP)
	self.call_go(_F_strhash)                                 // CALL_GO strhash
	self.Emit("MOVQ", jit.Ptr(_SP, 16), _AX)                 // MOVQ    16(SP), AX
	self.Emit("MOVQ", _AX, _R9)                              // MOVQ    AX, R9          -- R9 = hash of the key
	self.Emit("MOVQ", jit.Imm(freezeFields(p.vf())), _CX)    // MOVQ    ${p.vf()}, CX
	self.Emit("MOVQ", jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ    FieldMap.b(CX), SI
	self.Emit("MOVQ", jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ    FieldMap.N(CX), CX
	self.Emit("TESTQ", _CX, _CX)                             // TESTQ   CX, CX
	self.Sjmp("JZ", "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
	self.Link("_loop_{n}")                                   // _loop_{n}:              -- probe the next bucket
	self.Emit("XORL", _DX, _DX)                              // XORL    DX, DX
	self.From("DIVQ", _CX)                                   // DIVQ    CX              -- DX = probe index mod N
	self.Emit("LEAQ", jit.Ptr(_DX, 1), _AX)                  // LEAQ    1(DX), AX
	self.Emit("SHLQ", jit.Imm(5), _DX)                       // SHLQ    $5, DX          -- entry offset = index * 32
	self.Emit("LEAQ", jit.Sib(_SI, _DX, 1, 0), _DI)          // LEAQ    (SI)(DX), DI
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Hash), _R8)           // MOVQ    FieldEntry.Hash(DI), R8
	self.Emit("TESTQ", _R8, _R8)                             // TESTQ   R8, R8
	self.Sjmp("JZ", "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}  -- empty slot: not found
	self.Emit("CMPQ", _R8, _R9)                              // CMPQ    R8, R9
	self.Sjmp("JNE", "_loop_{n}")                            // JNE     _loop_{n}
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Name+8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX
	self.Emit("CMPQ", _DX, _VAR_sv_n)                        // CMPQ    DX, sv.n        -- lengths must match too
	self.Sjmp("JNE", "_loop_{n}")                            // JNE     _loop_{n}
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_ID), _R8)             // MOVQ    FieldEntry.ID(DI), R8
	self.Emit("MOVQ", _AX, _VAR_ss_AX)                       // MOVQ    AX, ss.AX       -- spill registers across the Go call
	self.Emit("MOVQ", _CX, _VAR_ss_CX)                       // MOVQ    CX, ss.CX
	self.Emit("MOVQ", _SI, _VAR_ss_SI)                       // MOVQ    SI, ss.SI
	self.Emit("MOVQ", _R8, _VAR_ss_R8)                       // MOVQ    R8, ss.R8
	self.Emit("MOVQ", _R9, _VAR_ss_R9)                       // MOVQ    R9, ss.R9
	self.Emit("MOVQ", _VAR_sv_p, _AX)                        // MOVQ    _VAR_sv_p, AX
	self.Emit("MOVQ", jit.Ptr(_DI, _Fe_Name), _CX)           // MOVQ    FieldEntry.Name(DI), CX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                  // MOVQ    AX, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))                  // MOVQ    CX, 8(SP)
	self.Emit("MOVQ", _DX, jit.Ptr(_SP, 16))                 // MOVQ    DX, 16(SP)
	self.call_go(_F_memequal)                                // CALL_GO memequal
	self.Emit("MOVQ", _VAR_ss_AX, _AX)                       // MOVQ    ss.AX, AX       -- restore spilled registers
	self.Emit("MOVQ", _VAR_ss_CX, _CX)                       // MOVQ    ss.CX, CX
	self.Emit("MOVQ", _VAR_ss_SI, _SI)                       // MOVQ    ss.SI, SI
	self.Emit("MOVQ", _VAR_ss_R9, _R9)                       // MOVQ    ss.R9, R9
	self.Emit("MOVB", jit.Ptr(_SP, 24), _DX)                 // MOVB    24(SP), DX
	self.Emit("TESTB", _DX, _DX)                             // TESTB   DX, DX
	self.Sjmp("JZ", "_loop_{n}")                             // JZ      _loop_{n}       -- hash collision: keep probing
	self.Emit("MOVQ", _VAR_ss_R8, _R8)                       // MOVQ    ss.R8, R8
	self.Emit("MOVQ", _R8, _VAR_sr)                          // MOVQ    R8, sr          -- sr = matched field ID
	self.Sjmp("JMP", "_end_{n}")                             // JMP     _end_{n}
	self.Link("_try_lowercase_{n}")                          // _try_lowercase_{n}:
	self.Emit("MOVQ", jit.Imm(referenceFields(p.vf())), _AX) // MOVQ    ${p.vf()}, AX
	self.Emit("MOVOU", _VAR_sv, _X0)                         // MOVOU   sv, X0
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                  // MOVQ    AX, (SP)
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                 // MOVOU   X0, 8(SP)
	self.call_go(_F_FieldMap_GetCaseInsensitive)             // CALL_GO FieldMap::GetCaseInsensitive
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _AX)                 // MOVQ    24(SP), AX
	self.Emit("MOVQ", _AX, _VAR_sr)                          // MOVQ    AX, _VAR_sr
	self.Emit("TESTQ", _AX, _AX)                             // TESTQ   AX, AX
	self.Sjmp("JNS", "_end_{n}")                             // JNS     _end_{n}
	self.Emit("BTQ", jit.Imm(_F_disable_unknown), _ARG_fv)   // BTQ     ${_F_disable_unknown}, fv
	self.Sjmp("JC", _LB_field_error)                         // JC      _field_error
	self.Link("_end_{n}")                                    // _end_{n}:
}
  1792  
// _asm_OP_unmarshal emits code that invokes the value's
// json.Unmarshaler implementation (value receiver form).
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
	self.unmarshal_json(p.vt(), true)
}
  1796  
// _asm_OP_unmarshal_p emits code that invokes the value's
// json.Unmarshaler implementation (pointer receiver form).
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
	self.unmarshal_json(p.vt(), false)
}
  1800  
// _asm_OP_unmarshal_text emits code that invokes the value's
// encoding.TextUnmarshaler implementation (value receiver form).
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
	self.unmarshal_text(p.vt(), true)
}
  1804  
// _asm_OP_unmarshal_text_p emits code that invokes the value's
// encoding.TextUnmarshaler implementation (pointer receiver form).
func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
	self.unmarshal_text(p.vt(), false)
}
  1808  
// _asm_OP_lspace emits code that skips leading JSON whitespace, using the
// per-instruction "_{n}" suffix to keep emitted labels unique.
func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
	self.lspace("_{n}")
}
  1812  
// lspace emits code that advances the input cursor IC past JSON whitespace
// (the characters selected by the _BM_space bitmap, all <= ' '). It inlines
// checks for up to 4 consecutive whitespace characters, then hands longer
// runs to the native lspace function. Reaching end of input raises
// _LB_eof_error; a native-call failure raises _LB_parsing_error_v. The
// subfix parameter keeps the exit label unique per call site.
func (self *_Assembler) lspace(subfix string) {
	var label = "_lspace" + subfix

	self.Emit("CMPQ", _IC, _IL)                        // CMPQ    IC, IL
	self.Sjmp("JAE", _LB_eof_error)                    // JAE     _eof_error
	self.Emit("MOVQ", jit.Imm(_BM_space), _DX)         // MOVQ    _BM_space, DX
	self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
	self.Emit("CMPQ", _AX, jit.Imm(' '))               // CMPQ    AX, $' '
	self.Sjmp("JA", label)                             // JA      _lspace{subfix}
	self.Emit("BTQ", _AX, _DX)                         // BTQ     AX, DX
	self.Sjmp("JNC", label)                            // JNC     _lspace{subfix}

	/* test up to 4 characters */
	for i := 0; i < 3; i++ {
		self.Emit("ADDQ", jit.Imm(1), _IC)                 // ADDQ    $1, IC
		self.Emit("CMPQ", _IC, _IL)                        // CMPQ    IC, IL
		self.Sjmp("JAE", _LB_eof_error)                    // JAE     _eof_error
		self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
		self.Emit("CMPQ", _AX, jit.Imm(' '))               // CMPQ    AX, $' '
		self.Sjmp("JA", label)                             // JA      _lspace{subfix}
		self.Emit("BTQ", _AX, _DX)                         // BTQ     AX, DX
		self.Sjmp("JNC", label)                            // JNC     _lspace{subfix}
	}

	/* hand over to the native function */
	self.Emit("MOVQ", _IP, _DI)          // MOVQ    IP, DI
	self.Emit("MOVQ", _IL, _SI)          // MOVQ    IL, SI
	self.Emit("MOVQ", _IC, _DX)          // MOVQ    IC, DX
	self.call(_F_lspace)                 // CALL    lspace
	self.Emit("TESTQ", _AX, _AX)         // TESTQ   AX, AX
	self.Sjmp("JS", _LB_parsing_error_v) // JS      _parsing_error_v
	self.Emit("CMPQ", _AX, _IL)          // CMPQ    AX, IL
	self.Sjmp("JAE", _LB_eof_error)      // JAE     _eof_error
	self.Emit("MOVQ", _AX, _IC)          // MOVQ    AX, IC
	self.Link(label)                     // _lspace{subfix}:
}
  1849  
// _asm_OP_match_char emits code that requires the instruction's character at
// the cursor, raising a char error if it is absent.
func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
	self.match_char(p.vb())
}
  1853  
// match_char emits code that requires `char` at the cursor: it raises
// _LB_char_0_error on mismatch and advances IC by one on match.
func (self *_Assembler) match_char(char byte) {
	self.check_eof(1)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()}
	self.Sjmp("JNE", _LB_char_0_error)                               // JNE  _char_0_error
	self.Emit("ADDQ", jit.Imm(1), _IC)                               // ADDQ $1, IC
}
  1860  
// _asm_OP_check_char emits code that tests for the instruction's character at
// the cursor: on match it consumes the character (branchless, via CMOVQEQ)
// and jumps to the instruction's target; otherwise it falls through.
func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
	self.check_eof(1)
	self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                            // LEAQ    1(IC), AX
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB    (IP)(IC), ${p.vb()}
	self.Emit("CMOVQEQ", _AX, _IC)                                     // CMOVQEQ AX, IC
	self.Xjmp("JE", p.vi())                                            // JE      {p.vi()}
}
  1868  
// _asm_OP_check_char_0 emits code that tests for the instruction's character
// at the cursor and jumps to the instruction's target on match WITHOUT
// consuming the character.
func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
	self.check_eof(1)
	self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB    (IP)(IC), ${p.vb()}
	self.Xjmp("JE", p.vi())                                            // JE      {p.vi()}
}
  1874  
// _asm_OP_add emits code that advances the input cursor IC by the constant
// carried in the instruction.
func (self *_Assembler) _asm_OP_add(p *_Instr) {
	self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC
}
  1878  
// _asm_OP_load emits code that reloads VP from the top of the decoder's
// value-pointer stack (offset stored at (ST)).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ (ST), AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP
}
  1883  
// _asm_OP_save emits code that pushes VP onto the decoder's value-pointer
// stack (with a write barrier), raising _LB_stack_error when the stack is
// already at _MaxStackBytes.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)                           // MOVQ (ST), CX
	self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))                   // CMPQ CX, ${_MaxStackBytes}
	self.Sjmp("JAE", _LB_stack_error)                                 // JAE  _stack_error
	self.WriteRecNotAX(0, _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
	self.Emit("ADDQ", jit.Imm(8), _CX)                                // ADDQ $8, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))                           // MOVQ CX, (ST)
}
  1892  
// _asm_OP_drop emits code that pops one entry off the value-pointer stack
// into VP and zeroes the vacated slot so the GC does not keep it alive.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ (ST), AX
	self.Emit("SUBQ", jit.Imm(8), _AX)              // SUBQ $8, AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))         // MOVQ AX, (ST)
	self.Emit("XORL", _ET, _ET)                     // XORL ET, ET
	self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // MOVQ ET, 8(ST)(AX)
}
  1901  
// _asm_OP_drop_2 pops two slots (16 bytes) off the decoder state stack at
// once, reloading VP from the lower of the two popped slots. Both vacated
// slots are zeroed with a single 16-byte MOVOU of a cleared XMM register so
// no stale pointers remain visible to the garbage collector.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)          // MOVQ  (ST), AX
	self.Emit("SUBQ", jit.Imm(16), _AX)              // SUBQ  $16, AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)  // MOVQ  8(ST)(AX), VP
	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))          // MOVQ  AX, (ST)
	self.Emit("PXOR", _X0, _X0)                      // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
}
  1910  
// _asm_OP_recurse decodes a nested value of type p.vt() into the current
// value pointer by loading the reflect type into AX and delegating to the
// dynamic-dispatch decode path (decode_dynamic). Used where the decoder
// program cannot be flattened inline, e.g. recursive type definitions.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
	self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ   ${p.vt()}, AX
	self.decode_dynamic(_AX, _VP)            // DECODE AX, VP
}
  1915  
// _asm_OP_goto emits an unconditional jump to the program instruction at
// index p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
	self.Xjmp("JMP", p.vi())
}
  1919  
// _asm_OP_switch emits a computed jump through an inline jump table. The
// selector comes from the _VAR_sr stack slot; values >= len(p.vs()) (the
// unsigned JAE also catches negative selectors) take the default path, which
// simply falls through to the next instruction. Each table entry is a 32-bit
// offset relative to the table base, so the target address is computed as
// table base (DI) plus the sign-extended entry.
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
	self.Emit("MOVQ", _VAR_sr, _AX)          // MOVQ sr, AX
	self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())}
	self.Sjmp("JAE", "_default_{n}")         // JAE  _default_{n}

	/* jump table selector: raw bytes encode LEAQ rel32(PC), DI because the
	 * jit wrapper cannot express a PC-relative LEA with a late-bound label */
	self.Byte(0x48, 0x8d, 0x3d)                        // LEAQ    ?(PC), DI
	self.Sref("_switch_table_{n}", 4)                  // ....    &_switch_table_{n}
	self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
	self.Emit("ADDQ", _DI, _AX)                        // ADDQ    DI, AX
	self.Rjmp("JMP", _AX)                              // JMP     AX
	self.Link("_switch_table_{n}")                     // _switch_table_{n}:

	/* generate the jump table: entry i is written at table base + i*4, so
	 * the -i*4 adjustment makes every stored offset relative to the base */
	for i, v := range p.vs() {
		self.Xref(v, int64(-i)*4)
	}

	/* default case: land here when the selector is out of range */
	self.Link("_default_{n}")
	self.NOP()
}
  1942  
// print_gc emits a debug trace call that prints the instruction index i and
// the opcodes of the two surrounding instructions via println. The three
// arguments are spilled to the outgoing stack frame in reverse order
// (matching the go1.16 stack-based calling convention) before the call.
// Debug-only: not part of normal decoding.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
	self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16)) // MOVQ $(p2.op()), 16(SP)
	self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))  // MOVQ $(p1.op()), 8(SP)
	self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))        // MOVQ $(i), (SP)
	self.call_go(_F_println)
}