github.com/goshafaq/sonic@v0.0.0-20231026082336-871835fb94c6/internal/encoder/assembler_regabi_amd64.go

     1  //go:build go1.17 && !go1.22
     2  // +build go1.17,!go1.22
     3  
     4  /*
     5   * Copyright 2021 ByteDance Inc.
     6   *
     7   * Licensed under the Apache License, Version 2.0 (the "License");
     8   * you may not use this file except in compliance with the License.
     9   * You may obtain a copy of the License at
    10   *
    11   *     http://www.apache.org/licenses/LICENSE-2.0
    12   *
    13   * Unless required by applicable law or agreed to in writing, software
    14   * distributed under the License is distributed on an "AS IS" BASIS,
    15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    16   * See the License for the specific language governing permissions and
    17   * limitations under the License.
    18   */
    19  
    20  package encoder
    21  
    22  import (
    23  	"fmt"
    24  	"reflect"
    25  	"strconv"
    26  	"unsafe"
    27  
    28  	"github.com/goshafaq/sonic/internal/cpu"
    29  	"github.com/goshafaq/sonic/internal/jit"
    30  	"github.com/goshafaq/sonic/internal/native/types"
    31  	"github.com/twitchyliquid64/golang-asm/obj"
    32  	"github.com/twitchyliquid64/golang-asm/obj/x86"
    33  
    34  	"github.com/goshafaq/sonic/internal/native"
    35  	"github.com/goshafaq/sonic/internal/rt"
    36  )
    37  
    38  /** Register Allocations
    39   *
    40   *  State Registers:
    41   *
    42   *      %r15 : stack base
    43   *      %rdi : result pointer
    44   *      %rsi : result length
    45   *      %rdx : result capacity
    46   *      %r10 : sp->p
    47   *      %r11 : sp->q
    48   *      %r12 : sp->x
    49   *      %r13 : sp->f
    50   *
    51   *  Error Registers:
    52   *
    53   *      %rax : error type register
    54   *      %rbx : error pointer register
    55   */
    56  
    57  /** Function Prototype & Stack Map
    58   *
    59   *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
    60   *
    61   *  buf    :   (FP)
    62   *  p      :  8(FP)
    63   *  sb     : 16(FP)
    64   *  fv     : 24(FP)
    65   *  err.vt : 32(FP)
    66   *  err.vp : 40(FP)
    67   */
    68  
    69  const (
    70  	_S_cond = iota
    71  	_S_init
    72  )
    73  
    74  const (
    75  	_FP_args   = 32 // 32 bytes for spill registers of arguments
    76  	_FP_fargs  = 40 // 40 bytes for passing arguments to other Go functions
    77  	_FP_saves  = 64 // 64 bytes for saving the registers before CALL instructions
    78  	_FP_locals = 24 // 24 bytes for local variables
    79  )
    80  
    81  const (
    82  	_FP_loffs = _FP_fargs + _FP_saves
    83  	_FP_offs  = _FP_loffs + _FP_locals
    84  	// _FP_offs  = _FP_loffs + _FP_locals + _FP_debug
    85  	_FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
    86  	_FP_base = _FP_size + 8 // 8 bytes for the return address
    87  )
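
        // For reference (simple arithmetic on the constants above), the frame
        // layout works out to:
        //
        //   _FP_loffs = 40 + 64  = 104   // start of local variables
        //   _FP_offs  = 104 + 24 = 128   // saved parent BP
        //   _FP_size  = 128 + 8  = 136   // total frame size
        //   _FP_base  = 136 + 8  = 144   // first argument spill slot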
    88  
    89  const (
    90  	_FM_exp32 = 0x7f800000
    91  	_FM_exp64 = 0x7ff0000000000000
    92  )
    93  
    94  const (
    95  	_IM_null   = 0x6c6c756e // 'null'
    96  	_IM_true   = 0x65757274 // 'true'
    97  	_IM_fals   = 0x736c6166 // 'fals' ('false' without the 'e')
    98  	_IM_open   = 0x00225c22 // '"\"' followed by a NUL byte
    99  	_IM_array  = 0x5d5b     // '[]'
   100  	_IM_object = 0x7d7b     // '{}'
   101  	_IM_mulv   = -0x5555555555555555
   102  )
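
        // The _IM_* values are little-endian immediates: a single MOVL/MOVW store
        // writes the characters in source order. For example, 0x6c6c756e is the
        // bytes 6e 75 6c 6c, i.e. "null", and 0x5d5b is "[]"; _IM_open packs
        // '"', '\', '"' plus a trailing NUL into one 32-bit store.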
   103  
   104  const (
   105  	_LB_more_space        = "_more_space"
   106  	_LB_more_space_return = "_more_space_return_"
   107  )
   108  
   109  const (
   110  	_LB_error                 = "_error"
   111  	_LB_error_too_deep        = "_error_too_deep"
   112  	_LB_error_invalid_number  = "_error_invalid_number"
   113  	_LB_error_nan_or_infinite = "_error_nan_or_infinite"
   114  	_LB_panic                 = "_panic"
   115  )
   116  
   117  var (
   118  	_AX = jit.Reg("AX")
   119  	_BX = jit.Reg("BX")
   120  	_CX = jit.Reg("CX")
   121  	_DX = jit.Reg("DX")
   122  	_DI = jit.Reg("DI")
   123  	_SI = jit.Reg("SI")
   124  	_BP = jit.Reg("BP")
   125  	_SP = jit.Reg("SP")
   126  	_R8 = jit.Reg("R8")
   127  	_R9 = jit.Reg("R9")
   128  )
   129  
   130  var (
   131  	_X0 = jit.Reg("X0")
   132  	_Y0 = jit.Reg("Y0")
   133  )
   134  
   135  var (
   136  	_ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go...
   137  	_RP = jit.Reg("DI")
   138  	_RL = jit.Reg("SI")
   139  	_RC = jit.Reg("DX")
   140  )
   141  
   142  var (
   143  	_LR = jit.Reg("R9")
   144  	_ET = jit.Reg("AX")
   145  	_EP = jit.Reg("BX")
   146  )
   147  
   148  var (
   149  	_SP_p = jit.Reg("R10") // saved on BX when call_c
   150  	_SP_q = jit.Reg("R11") // saved on BP when call_c
   151  	_SP_x = jit.Reg("R12")
   152  	_SP_f = jit.Reg("R13")
   153  )
   154  
   155  var (
   156  	_ARG_rb = jit.Ptr(_SP, _FP_base)
   157  	_ARG_vp = jit.Ptr(_SP, _FP_base+8)
   158  	_ARG_sb = jit.Ptr(_SP, _FP_base+16)
   159  	_ARG_fv = jit.Ptr(_SP, _FP_base+24)
   160  )
   161  
   162  var (
   163  	_RET_et = _ET
   164  	_RET_ep = _EP
   165  )
   166  
   167  var (
   168  	_VAR_sp = jit.Ptr(_SP, _FP_fargs+_FP_saves)
   169  	_VAR_dn = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
   170  	_VAR_vp = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
   171  )
   172  
   173  var (
   174  	_REG_ffi = []obj.Addr{_RP, _RL, _RC, _SP_q}
   175  	_REG_b64 = []obj.Addr{_SP_p, _SP_q}
   176  
   177  	_REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
   178  	_REG_ms  = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
   179  	_REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
   180  )
   181  
   182  type _Assembler struct {
   183  	jit.BaseAssembler
   184  	p    _Program
   185  	x    int
   186  	name string
   187  }
   188  
   189  func newAssembler(p _Program) *_Assembler {
   190  	return new(_Assembler).Init(p)
   191  }
   192  
   193  /** Assembler Interface **/
   194  
   195  func (self *_Assembler) Load() _Encoder {
   196  	return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
   197  }
   198  
   199  func (self *_Assembler) Init(p _Program) *_Assembler {
   200  	self.p = p
   201  	self.BaseAssembler.Init(self.compile)
   202  	return self
   203  }
   204  
   205  func (self *_Assembler) compile() {
   206  	self.prologue()
   207  	self.instrs()
   208  	self.epilogue()
   209  	self.builtins()
   210  }
   211  
   212  /** Assembler Stages **/
   213  
   214  var _OpFuncTab = [256]func(*_Assembler, *_Instr){
   215  	_OP_null:           (*_Assembler)._asm_OP_null,
   216  	_OP_empty_arr:      (*_Assembler)._asm_OP_empty_arr,
   217  	_OP_empty_obj:      (*_Assembler)._asm_OP_empty_obj,
   218  	_OP_bool:           (*_Assembler)._asm_OP_bool,
   219  	_OP_i8:             (*_Assembler)._asm_OP_i8,
   220  	_OP_i16:            (*_Assembler)._asm_OP_i16,
   221  	_OP_i32:            (*_Assembler)._asm_OP_i32,
   222  	_OP_i64:            (*_Assembler)._asm_OP_i64,
   223  	_OP_u8:             (*_Assembler)._asm_OP_u8,
   224  	_OP_u16:            (*_Assembler)._asm_OP_u16,
   225  	_OP_u32:            (*_Assembler)._asm_OP_u32,
   226  	_OP_u64:            (*_Assembler)._asm_OP_u64,
   227  	_OP_f32:            (*_Assembler)._asm_OP_f32,
   228  	_OP_f64:            (*_Assembler)._asm_OP_f64,
   229  	_OP_str:            (*_Assembler)._asm_OP_str,
   230  	_OP_bin:            (*_Assembler)._asm_OP_bin,
   231  	_OP_quote:          (*_Assembler)._asm_OP_quote,
   232  	_OP_number:         (*_Assembler)._asm_OP_number,
   233  	_OP_eface:          (*_Assembler)._asm_OP_eface,
   234  	_OP_iface:          (*_Assembler)._asm_OP_iface,
   235  	_OP_byte:           (*_Assembler)._asm_OP_byte,
   236  	_OP_text:           (*_Assembler)._asm_OP_text,
   237  	_OP_deref:          (*_Assembler)._asm_OP_deref,
   238  	_OP_index:          (*_Assembler)._asm_OP_index,
   239  	_OP_load:           (*_Assembler)._asm_OP_load,
   240  	_OP_save:           (*_Assembler)._asm_OP_save,
   241  	_OP_drop:           (*_Assembler)._asm_OP_drop,
   242  	_OP_drop_2:         (*_Assembler)._asm_OP_drop_2,
   243  	_OP_recurse:        (*_Assembler)._asm_OP_recurse,
   244  	_OP_is_nil:         (*_Assembler)._asm_OP_is_nil,
   245  	_OP_is_nil_p1:      (*_Assembler)._asm_OP_is_nil_p1,
   246  	_OP_is_zero_1:      (*_Assembler)._asm_OP_is_zero_1,
   247  	_OP_is_zero_2:      (*_Assembler)._asm_OP_is_zero_2,
   248  	_OP_is_zero_4:      (*_Assembler)._asm_OP_is_zero_4,
   249  	_OP_is_zero_8:      (*_Assembler)._asm_OP_is_zero_8,
   250  	_OP_is_zero_map:    (*_Assembler)._asm_OP_is_zero_map,
   251  	_OP_goto:           (*_Assembler)._asm_OP_goto,
   252  	_OP_map_iter:       (*_Assembler)._asm_OP_map_iter,
   253  	_OP_map_stop:       (*_Assembler)._asm_OP_map_stop,
   254  	_OP_map_check_key:  (*_Assembler)._asm_OP_map_check_key,
   255  	_OP_map_write_key:  (*_Assembler)._asm_OP_map_write_key,
   256  	_OP_map_value_next: (*_Assembler)._asm_OP_map_value_next,
   257  	_OP_slice_len:      (*_Assembler)._asm_OP_slice_len,
   258  	_OP_slice_next:     (*_Assembler)._asm_OP_slice_next,
   259  	_OP_marshal:        (*_Assembler)._asm_OP_marshal,
   260  	_OP_marshal_p:      (*_Assembler)._asm_OP_marshal_p,
   261  	_OP_marshal_text:   (*_Assembler)._asm_OP_marshal_text,
   262  	_OP_marshal_text_p: (*_Assembler)._asm_OP_marshal_text_p,
   263  	_OP_cond_set:       (*_Assembler)._asm_OP_cond_set,
   264  	_OP_cond_testc:     (*_Assembler)._asm_OP_cond_testc,
   265  }
   266  
   267  func (self *_Assembler) instr(v *_Instr) {
   268  	if fn := _OpFuncTab[v.op()]; fn != nil {
   269  		fn(self, v)
   270  	} else {
   271  		panic(fmt.Sprintf("invalid opcode: %d", v.op()))
   272  	}
   273  }
   274  
   275  func (self *_Assembler) instrs() {
   276  	for i, v := range self.p {
   277  		self.Mark(i)
   278  		self.instr(&v)
   279  		self.debug_instr(i, &v)
   280  	}
   281  }
   282  
   283  func (self *_Assembler) builtins() {
   284  	self.more_space()
   285  	self.error_too_deep()
   286  	self.error_invalid_number()
   287  	self.error_nan_or_infinite()
   288  	self.go_panic()
   289  }
   290  
   291  func (self *_Assembler) epilogue() {
   292  	self.Mark(len(self.p))
   293  	self.Emit("XORL", _ET, _ET)
   294  	self.Emit("XORL", _EP, _EP)
   295  	self.Link(_LB_error)
   296  	self.Emit("MOVQ", _ARG_rb, _CX)                // MOVQ rb<>+0(FP), CX
   297  	self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))        // MOVQ RL, 8(CX)
   298  	self.Emit("MOVQ", jit.Imm(0), _ARG_rb)         // MOVQ $0, rb<>+0(FP)
   299  	self.Emit("MOVQ", jit.Imm(0), _ARG_vp)         // MOVQ $0, vp<>+8(FP)
   300  	self.Emit("MOVQ", jit.Imm(0), _ARG_sb)         // MOVQ $0, sb<>+16(FP)
   301  	self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
   302  	self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
   303  	self.Emit("RET")                               // RET
   304  }
   305  
   306  func (self *_Assembler) prologue() {
   307  	self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
   308  	self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
   309  	self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
   310  	self.Emit("MOVQ", _AX, _ARG_rb)                // MOVQ AX, rb<>+0(FP)
   311  	self.Emit("MOVQ", _BX, _ARG_vp)                // MOVQ BX, vp<>+8(FP)
   312  	self.Emit("MOVQ", _CX, _ARG_sb)                // MOVQ CX, sb<>+16(FP)
   313  	self.Emit("MOVQ", _DI, _ARG_fv)                // MOVQ DI, fv<>+24(FP)
   314  	self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)        // MOVQ (AX)  , DI
   315  	self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)        // MOVQ 8(AX) , SI
   316  	self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)       // MOVQ 16(AX), DX
   317  	self.Emit("MOVQ", _BX, _SP_p)                  // MOVQ BX, R10
   318  	self.Emit("MOVQ", _CX, _ST)                    // MOVQ CX, R15
   319  	self.Emit("XORL", _SP_x, _SP_x)                // XORL R12, R12
   320  	self.Emit("XORL", _SP_f, _SP_f)                // XORL R13, R13
   321  	self.Emit("XORL", _SP_q, _SP_q)                // XORL R11, R11
   322  }
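
        // Note: under the Go internal register ABI (go1.17+, amd64) integer
        // arguments arrive in AX, BX, CX, DI, SI, ..., so the prologue above
        // spills buf/p/sb/fv from exactly those registers into their slots at
        // _FP_base before loading the buffer header (ptr/len/cap) from *buf.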
   323  
   324  /** Assembler Inline Functions **/
   325  
   326  func (self *_Assembler) xsave(reg ...obj.Addr) {
   327  	for i, v := range reg {
   328  		if i > _FP_saves/8-1 {
   329  			panic("too many registers to save")
   330  		} else {
   331  			self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
   332  		}
   333  	}
   334  }
   335  
   336  func (self *_Assembler) xload(reg ...obj.Addr) {
   337  	for i, v := range reg {
   338  		if i > _FP_saves/8-1 {
   339  			panic("too many registers to load")
   340  		} else {
   341  			self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
   342  		}
   343  	}
   344  }
   345  
   346  func (self *_Assembler) rbuf_di() {
   347  	if _RP.Reg != x86.REG_DI {
   348  		panic("register allocation messed up: RP != DI")
   349  	} else {
   350  		self.Emit("ADDQ", _RL, _RP)
   351  	}
   352  }
   353  
   354  func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
   355  	self.check_size(nd)
   356  	self.save_c()                          // SAVE   $C_regs
   357  	self.rbuf_di()                         // MOVQ   RP, DI
   358  	self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins   (SP.p), SI
   359  	self.call_c(fn)                        // CALL_C $fn
   360  	self.Emit("ADDQ", _AX, _RL)            // ADDQ   AX, RL
   361  }
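
        // store_int reserves nd bytes (an upper bound on the formatted width,
        // e.g. 21 for an int64 with sign), loads the integer from (SP.p) into SI
        // with the given sign/zero-extending move, points DI at the write cursor
        // via rbuf_di, and advances RL by the byte count the native itoa leaves
        // in AX. A typical use from this file: store_int(21, _F_i64toa, "MOVQ").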
   362  
   363  func (self *_Assembler) store_str(s string) {
   364  	i := 0
   365  	m := rt.Str2Mem(s)
   366  
   367  	/* 8-byte stores */
   368  	for i <= len(m)-8 {
   369  		self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)       // MOVQ $s[i:], AX
   370  		self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
   371  		i += 8
   372  	}
   373  
   374  	/* 4-byte stores */
   375  	if i <= len(m)-4 {
   376  		self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
   377  		i += 4
   378  	}
   379  
   380  	/* 2-byte stores */
   381  	if i <= len(m)-2 {
   382  		self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
   383  		i += 2
   384  	}
   385  
   386  	/* last byte */
   387  	if i < len(m) {
   388  		self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
   389  	}
   390  }
   391  
   392  func (self *_Assembler) check_size(n int) {
   393  	self.check_size_rl(jit.Ptr(_RL, int64(n)))
   394  }
   395  
   396  func (self *_Assembler) check_size_r(r obj.Addr, d int) {
   397  	self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
   398  }
   399  
   400  func (self *_Assembler) check_size_rl(v obj.Addr) {
   401  	idx := self.x
   402  	key := _LB_more_space_return + strconv.Itoa(idx)
   403  
   404  	/* the following code relies on LR == R9 to work */
   405  	if _LR.Reg != x86.REG_R9 {
   406  		panic("register allocation messed up: LR != R9")
   407  	}
   408  
   409  	/* check for buffer capacity */
   410  	self.x++
   411  	self.Emit("LEAQ", v, _AX)   // LEAQ $v, AX
   412  	self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
   413  	self.Sjmp("JBE", key)       // JBE  _more_space_return_{n}
   414  	self.slice_grow_ax(key)     // GROW $key
   415  	self.Link(key)              // _more_space_return_{n}:
   416  }
   417  
   418  func (self *_Assembler) slice_grow_ax(ret string) {
   419  	self.Byte(0x4c, 0x8d, 0x0d)      // LEAQ ?(PC), R9
   420  	self.Sref(ret, 4)                // .... &ret
   421  	self.Sjmp("JMP", _LB_more_space) // JMP  _more_space
   422  }
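
        // The raw bytes 4c 8d 0d encode `LEAQ (RIP+disp32), R9`; Sref patches the
        // displacement to point at the `ret` label, so R9 carries the resume
        // address into _more_space, which returns through it with `JMP LR`
        // instead of a CALL/RET pair.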
   423  
   424  /** State Stack Helpers **/
   425  
   426  const (
   427  	_StateSize  = int64(unsafe.Sizeof(_State{}))
   428  	_StackLimit = _MaxStack * _StateSize
   429  )
   430  
   431  func (self *_Assembler) save_state() {
   432  	self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)            // MOVQ (ST), CX
   433  	self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9)   // LEAQ _StateSize(CX), R9
   434  	self.Emit("CMPQ", _R9, jit.Imm(_StackLimit))       // CMPQ R9, $_StackLimit
   435  	self.Sjmp("JAE", _LB_error_too_deep)               // JAE  _error_too_deep
   436  	self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))  // MOVQ SP.x, 8(ST)(CX)
   437  	self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
   438  	self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))  // MOVQ SP.p, 24(ST)(CX)
   439  	self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))  // MOVQ SP.q, 32(ST)(CX)
   440  	self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0))            // MOVQ R9, (ST)
   441  }
   442  
   443  func (self *_Assembler) drop_state(decr int64) {
   444  	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
   445  	self.Emit("SUBQ", jit.Imm(decr), _AX)              // SUBQ  $decr, AX
   446  	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
   447  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _SP_x)  // MOVQ  8(ST)(AX), SP.x
   448  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ  16(ST)(AX), SP.f
   449  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ  24(ST)(AX), SP.p
   450  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ  32(ST)(AX), SP.q
   451  	self.Emit("PXOR", _X0, _X0)                        // PXOR  X0, X0
   452  	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))   // MOVOU X0, 8(ST)(AX)
   453  	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))  // MOVOU X0, 24(ST)(AX)
   454  }
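
        // The two MOVOUs zero the popped state's {x, f} and {p, q} slots; p and q
        // are pointer slots (written via WritePtr in save_state), so clearing
        // them on pop presumably keeps stale pointers out of the state stack for
        // the garbage collector.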
   455  
   456  /** Buffer Helpers **/
   457  
   458  func (self *_Assembler) add_char(ch byte) {
   459  	self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
   460  	self.Emit("ADDQ", jit.Imm(1), _RL)                             // ADDQ $1, RL
   461  }
   462  
   463  func (self *_Assembler) add_long(ch uint32, n int64) {
   464  	self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
   465  	self.Emit("ADDQ", jit.Imm(n), _RL)                             // ADDQ $n, RL
   466  }
   467  
   468  func (self *_Assembler) add_text(ss string) {
   469  	self.store_str(ss)                              // TEXT $ss
   470  	self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
   471  }
   472  
   473  // get *buf at AX
   474  func (self *_Assembler) prep_buffer_AX() {
   475  	self.Emit("MOVQ", _ARG_rb, _AX)         // MOVQ rb<>+0(FP), AX
   476  	self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
   477  }
   478  
   479  func (self *_Assembler) save_buffer() {
   480  	self.Emit("MOVQ", _ARG_rb, _CX)          // MOVQ rb<>+0(FP), CX
   481  	self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))  // MOVQ RP, (CX)
   482  	self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))  // MOVQ RL, 8(CX)
   483  	self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
   484  }
   485  
   486  // get *buf at AX
   487  func (self *_Assembler) load_buffer_AX() {
   488  	self.Emit("MOVQ", _ARG_rb, _AX)          // MOVQ rb<>+0(FP), AX
   489  	self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)  // MOVQ (AX), RP
   490  	self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)  // MOVQ 8(AX), RL
   491  	self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
   492  }
   493  
   494  /** Function Interface Helpers **/
   495  
   496  func (self *_Assembler) call(pc obj.Addr) {
   497  	self.Emit("MOVQ", pc, _LR) // MOVQ $pc, R9
   498  	self.Rjmp("CALL", _LR)     // CALL R9
   499  }
   500  
   501  func (self *_Assembler) save_c() {
   502  	self.xsave(_REG_ffi...) // SAVE $REG_ffi
   503  }
   504  
   505  func (self *_Assembler) call_b64(pc obj.Addr) {
   506  	self.xsave(_REG_b64...) // SAVE $REG_b64
   507  	self.call(pc)           // CALL $pc
   508  	self.xload(_REG_b64...) // LOAD $REG_b64
   509  }
   510  
   511  func (self *_Assembler) call_c(pc obj.Addr) {
   512  	self.Emit("XCHGQ", _SP_p, _BX)
   513  	self.call(pc)           // CALL $pc
   514  	self.xload(_REG_ffi...) // LOAD $REG_ffi
   515  	self.Emit("XCHGQ", _SP_p, _BX)
   516  }
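
        // call_c parks SP.p (R10) in BX across the native call: R10 and R11 are
        // caller-saved scratch registers in the SysV C ABI, while RBX is
        // callee-saved. SP.q is preserved differently, through the _REG_ffi spill
        // slots filled by save_c and reloaded here.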
   517  
   518  func (self *_Assembler) call_go(pc obj.Addr) {
   519  	self.xsave(_REG_all...) // SAVE $REG_all
   520  	self.call(pc)           // CALL $pc
   521  	self.xload(_REG_all...) // LOAD $REG_all
   522  }
   523  
   524  func (self *_Assembler) call_more_space(pc obj.Addr) {
   525  	self.xsave(_REG_ms...) // SAVE $REG_ms
   526  	self.call(pc)          // CALL $pc
   527  	self.xload(_REG_ms...) // LOAD $REG_ms
   528  }
   529  
   530  func (self *_Assembler) call_encoder(pc obj.Addr) {
   531  	self.xsave(_REG_enc...) // SAVE $REG_enc
   532  	self.call(pc)           // CALL $pc
   533  	self.xload(_REG_enc...) // LOAD $REG_enc
   534  }
   535  
   536  func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
   537  	switch vt.Kind() {
   538  	case reflect.Interface:
   539  		self.call_marshaler_i(fn, it)
   540  	case reflect.Ptr, reflect.Map:
   541  		self.call_marshaler_v(fn, it, vt, true)
   542  	// a one-element struct/array of a direct interface type can itself be direct
   543  	default:
   544  		self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
   545  	}
   546  }
   547  
   548  func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
   549  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ    (SP.p), AX
   550  	self.Emit("TESTQ", _AX, _AX)              // TESTQ   AX, AX
   551  	self.Sjmp("JZ", "_null_{n}")              // JZ      _null_{n}
   552  	self.Emit("MOVQ", _AX, _BX)               // MOVQ    AX, BX
   553  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ    8(SP.p), CX
   554  	self.Emit("MOVQ", jit.Gtype(it), _AX)     // MOVQ    $it, AX
   555  	self.call_go(_F_assertI2I)                // CALL_GO assertI2I
   556  	self.Emit("TESTQ", _AX, _AX)              // TESTQ   AX, AX
   557  	self.Sjmp("JZ", "_null_{n}")              // JZ      _null_{n}
   558  	self.Emit("MOVQ", _BX, _CX)               // MOVQ   BX, CX
   559  	self.Emit("MOVQ", _AX, _BX)               // MOVQ   AX, BX
   560  	self.prep_buffer_AX()
   561  	self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ   ARG.fv, DI
   562  	self.call_go(fn)                // CALL    $fn
   563  	self.Emit("TESTQ", _ET, _ET)    // TESTQ ET, ET
   564  	self.Sjmp("JNZ", _LB_error)     // JNZ   _error
   565  	self.load_buffer_AX()
   566  	self.Sjmp("JMP", "_done_{n}")                                 // JMP     _done_{n}
   567  	self.Link("_null_{n}")                                        // _null_{n}:
   568  	self.check_size(4)                                            // SIZE    $4
   569  	self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL    $'null', (RP)(RL*1)
   570  	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ    $4, RL
   571  	self.Link("_done_{n}")                                        // _done_{n}:
   572  }
   573  
   574  func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
   575  	self.prep_buffer_AX()                    // MOVE {buf}, AX
   576  	self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX
   577  
   578  	/* dereference the pointer if needed */
   579  	if !deref {
   580  		self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
   581  	} else {
   582  		self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX
   583  	}
   584  
   585  	/* call the encoder, and perform error checks */
   586  	self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ   ARG.fv, DI
   587  	self.call_go(fn)                // CALL  $fn
   588  	self.Emit("TESTQ", _ET, _ET)    // TESTQ ET, ET
   589  	self.Sjmp("JNZ", _LB_error)     // JNZ   _error
   590  	self.load_buffer_AX()
   591  }
   592  
   593  /** Builtin: _more_space **/
   594  
   595  var (
   596  	_T_byte      = jit.Type(byteType)
   597  	_F_growslice = jit.Func(growslice)
   598  )
   599  
   600  // AX must hold the required length n on entry
   601  func (self *_Assembler) more_space() {
   602  	self.Link(_LB_more_space)
   603  	self.Emit("MOVQ", _RP, _BX)        // MOVQ DI, BX
   604  	self.Emit("MOVQ", _RL, _CX)        // MOVQ SI, CX
   605  	self.Emit("MOVQ", _RC, _DI)        // MOVQ DX, DI
   606  	self.Emit("MOVQ", _AX, _SI)        // MOVQ AX, SI
   607  	self.Emit("MOVQ", _T_byte, _AX)    // MOVQ $_T_byte, AX
   608  	self.call_more_space(_F_growslice) // CALL $pc
   609  	self.Emit("MOVQ", _AX, _RP)        // MOVQ AX, DI
   610  	self.Emit("MOVQ", _BX, _RL)        // MOVQ BX, SI
   611  	self.Emit("MOVQ", _CX, _RC)        // MOVQ CX, DX
   612  	self.save_buffer()                 // SAVE {buf}
   613  	self.Rjmp("JMP", _LR)              // JMP  LR
   614  }
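
        // A sketch of the shuffle above, assuming the pre-go1.21 signature
        // runtime.growslice(et *_type, old slice, cap int): the buffer header
        // moves from RP/RL/RC (DI/SI/DX) into BX/CX/DI (old slice), the required
        // length in AX becomes `cap` in SI, and *byte goes in AX (et), i.e. the
        // internal ABI order AX, BX, CX, DI, SI. The grown slice comes back in
        // AX/BX/CX.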
   615  
   616  /** Builtin Errors **/
   617  
   618  var (
   619  	_V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
   620  	_V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
   621  	_I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
   622  )
   623  
   624  func (self *_Assembler) error_too_deep() {
   625  	self.Link(_LB_error_too_deep)
   626  	self.Emit("MOVQ", _V_ERR_too_deep, _EP)               // MOVQ $_V_ERR_too_deep, EP
   627  	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
   628  	self.Sjmp("JMP", _LB_error)                           // JMP  _error
   629  }
   630  
   631  func (self *_Assembler) error_invalid_number() {
   632  	self.Link(_LB_error_invalid_number)
   633  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ    (SP.p), AX
   634  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ    8(SP.p), BX
   635  	self.call_go(_F_error_number)             // CALL_GO error_number
   636  	self.Sjmp("JMP", _LB_error)               // JMP     _error
   637  }
   638  
   639  func (self *_Assembler) error_nan_or_infinite() {
   640  	self.Link(_LB_error_nan_or_infinite)
   641  	self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)        // MOVQ $_V_ERR_nan_or_infinite, EP
   642  	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
   643  	self.Sjmp("JMP", _LB_error)                           // JMP  _error
   644  }
   645  
   646  /** String Encoding Routine **/
   647  
   648  var (
   649  	_F_quote = jit.Imm(int64(native.S_quote))
   650  	_F_panic = jit.Func(goPanic)
   651  )
   652  
   653  func (self *_Assembler) go_panic() {
   654  	self.Link(_LB_panic)
   655  	self.Emit("MOVQ", _SP_p, _BX)
   656  	self.call_go(_F_panic)
   657  }
   658  
   659  func (self *_Assembler) encode_string(doubleQuote bool) {
   660  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ  8(SP.p), AX
   661  	self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
   662  	self.Sjmp("JZ", "_str_empty_{n}")         // JZ    _str_empty_{n}
   663  	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
   664  	self.Sjmp("JNE", "_str_next_{n}")
   665  	self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
   666  	self.Sjmp("JMP", _LB_panic)
   667  	self.Link("_str_next_{n}")
   668  
   669  	/* opening quote, check for double quote */
   670  	if !doubleQuote {
   671  		self.check_size_r(_AX, 2) // SIZE $2
   672  		self.add_char('"')        // CHAR $'"'
   673  	} else {
   674  		self.check_size_r(_AX, 6)  // SIZE $6
   675  		self.add_long(_IM_open, 3) // TEXT $`"\"`
   676  	}
   677  
   678  	/* quoting loop */
   679  	self.Emit("XORL", _AX, _AX)     // XORL AX, AX
   680  	self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
   681  	self.Link("_str_loop_{n}")      // _str_loop_{n}:
   682  	self.save_c()                   // SAVE $REG_ffi
   683  
   684  	/* load the output buffer first, and then input buffer,
   685  	 * because the parameter registers collide with RP / RL / RC */
   686  	self.Emit("MOVQ", _RC, _CX)                     // MOVQ RC, CX
   687  	self.Emit("SUBQ", _RL, _CX)                     // SUBQ RL, CX
   688  	self.Emit("MOVQ", _CX, _VAR_dn)                 // MOVQ CX, dn
   689  	self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
   690  	self.Emit("LEAQ", _VAR_dn, _CX)                 // LEAQ dn, CX
   691  	self.Emit("MOVQ", _VAR_sp, _AX)                 // MOVQ sp, AX
   692  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)       // MOVQ (SP.p), DI
   693  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)       // MOVQ 8(SP.p), SI
   694  	self.Emit("ADDQ", _AX, _DI)                     // ADDQ AX, DI
   695  	self.Emit("SUBQ", _AX, _SI)                     // SUBQ AX, SI
   696  
   697  	/* set the flags based on `doubleQuote` */
   698  	if !doubleQuote {
   699  		self.Emit("XORL", _R8, _R8) // XORL R8, R8
   700  	} else {
   701  		self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
   702  	}
   703  
   704  	/* call the native quoter */
   705  	self.call_c(_F_quote)           // CALL  quote
   706  	self.Emit("ADDQ", _VAR_dn, _RL) // ADDQ  dn, RL
   707  
   708  	self.Emit("TESTQ", _AX, _AX)      // TESTQ AX, AX
   709  	self.Sjmp("JS", "_str_space_{n}") // JS    _str_space_{n}
   710  
   711  	/* close the string, check for double quote */
   712  	if !doubleQuote {
   713  		self.check_size(1)               // SIZE $1
   714  		self.add_char('"')               // CHAR $'"'
   715  		self.Sjmp("JMP", "_str_end_{n}") // JMP  _str_end_{n}
   716  	} else {
   717  		self.check_size(3)               // SIZE $3
   718  		self.add_text("\\\"\"")          // TEXT $'\""'
   719  		self.Sjmp("JMP", "_str_end_{n}") // JMP  _str_end_{n}
   720  	}
   721  
   722  	/* not enough space to contain the quoted string */
   723  	self.Link("_str_space_{n}")                     // _str_space_{n}:
   724  	self.Emit("NOTQ", _AX)                          // NOTQ AX
   725  	self.Emit("ADDQ", _AX, _VAR_sp)                 // ADDQ AX, sp
   726  	self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
   727  	self.slice_grow_ax("_str_loop_{n}")             // GROW _str_loop_{n}
   728  
   729  	/* empty string, check for double quote */
   730  	if !doubleQuote {
   731  		self.Link("_str_empty_{n}") // _str_empty_{n}:
   732  		self.check_size(2)          // SIZE $2
   733  		self.add_text("\"\"")       // TEXT $'""'
   734  		self.Link("_str_end_{n}")   // _str_end_{n}:
   735  	} else {
   736  		self.Link("_str_empty_{n}")   // _str_empty_{n}:
   737  		self.check_size(6)            // SIZE $6
   738  		self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
   739  		self.Link("_str_end_{n}")     // _str_end_{n}:
   740  	}
   741  }
   742  
   743  /** OpCode Assembler Functions **/
   744  
   745  var (
   746  	_T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
   747  	_T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
   748  )
   749  
   750  var (
   751  	_F_f64toa    = jit.Imm(int64(native.S_f64toa))
   752  	_F_f32toa    = jit.Imm(int64(native.S_f32toa))
   753  	_F_i64toa    = jit.Imm(int64(native.S_i64toa))
   754  	_F_u64toa    = jit.Imm(int64(native.S_u64toa))
   755  	_F_b64encode = jit.Imm(int64(_subr__b64encode))
   756  )
   757  
   758  var (
   759  	_F_memmove       = jit.Func(memmove)
   760  	_F_error_number  = jit.Func(error_number)
   761  	_F_isValidNumber = jit.Func(isValidNumber)
   762  )
   763  
   764  var (
   765  	_F_iteratorStop  = jit.Func(iteratorStop)
   766  	_F_iteratorNext  = jit.Func(iteratorNext)
   767  	_F_iteratorStart = jit.Func(iteratorStart)
   768  )
   769  
   770  var (
   771  	_F_encodeTypedPointer  obj.Addr
   772  	_F_encodeJsonMarshaler obj.Addr
   773  	_F_encodeTextMarshaler obj.Addr
   774  )
   775  
   776  const (
   777  	_MODE_AVX2 = 1 << 2
   778  )
   779  
   780  func init() {
   781  	_F_encodeTypedPointer = jit.Func(encodeTypedPointer)
   782  	_F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
   783  	_F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
   784  }
   785  
   786  func (self *_Assembler) _asm_OP_null(_ *_Instr) {
   787  	self.check_size(4)
   788  	self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
   789  	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
   790  }
   791  
   792  func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
   793  	self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
   794  	self.Sjmp("JC", "_empty_arr_{n}")
   795  	self._asm_OP_null(nil)
   796  	self.Sjmp("JMP", "_empty_arr_end_{n}")
   797  	self.Link("_empty_arr_{n}")
   798  	self.check_size(2)
   799  	self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
   800  	self.Emit("ADDQ", jit.Imm(2), _RL)
   801  	self.Link("_empty_arr_end_{n}")
   802  }
   803  
   804  func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
   805  	self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
   806  	self.Sjmp("JC", "_empty_obj_{n}")
   807  	self._asm_OP_null(nil)
   808  	self.Sjmp("JMP", "_empty_obj_end_{n}")
   809  	self.Link("_empty_obj_{n}")
   810  	self.check_size(2)
   811  	self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
   812  	self.Emit("ADDQ", jit.Imm(2), _RL)
   813  	self.Link("_empty_obj_end_{n}")
   814  }
   815  
   816  func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
   817  	self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))              // CMPB (SP.p), $0
   818  	self.Sjmp("JE", "_false_{n}")                                 // JE   _false_{n}
   819  	self.check_size(4)                                            // SIZE $4
   820  	self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
   821  	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
   822  	self.Sjmp("JMP", "_end_{n}")                                  // JMP  _end_{n}
   823  	self.Link("_false_{n}")                                       // _false_{n}:
   824  	self.check_size(5)                                            // SIZE $5
   825  	self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
   826  	self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))      // MOVB $'e', 4(RP)(RL*1)
   827  	self.Emit("ADDQ", jit.Imm(5), _RL)                            // ADDQ $5, RL
   828  	self.Link("_end_{n}")                                         // _end_{n}:
   829  }
   830  
   831  func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
   832  	self.store_int(4, _F_i64toa, "MOVBQSX")
   833  }
   834  
   835  func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
   836  	self.store_int(6, _F_i64toa, "MOVWQSX")
   837  }
   838  
   839  func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
   840  	self.store_int(17, _F_i64toa, "MOVLQSX")
   841  }
   842  
   843  func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
   844  	self.store_int(21, _F_i64toa, "MOVQ")
   845  }
   846  
   847  func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
   848  	self.store_int(3, _F_u64toa, "MOVBQZX")
   849  }
   850  
   851  func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
   852  	self.store_int(5, _F_u64toa, "MOVWQZX")
   853  }
   854  
   855  func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
   856  	self.store_int(16, _F_u64toa, "MOVLQZX")
   857  }
   858  
   859  func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
   860  	self.store_int(20, _F_u64toa, "MOVQ")
   861  }
   862  
   863  func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
   864  	self.check_size(32)
   865  	self.Emit("MOVL", jit.Ptr(_SP_p, 0), _AX)  // MOVL     (SP.p), AX
   866  	self.Emit("ANDL", jit.Imm(_FM_exp32), _AX) // ANDL     $_FM_exp32, AX
   867  	self.Emit("XORL", jit.Imm(_FM_exp32), _AX) // XORL     $_FM_exp32, AX
   868  	self.Sjmp("JZ", _LB_error_nan_or_infinite) // JZ       _error_nan_or_infinite
   869  	self.save_c()                              // SAVE     $C_regs
   870  	self.rbuf_di()                             // MOVQ     RP, DI
   871  	self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0) // MOVSS    (SP.p), X0
   872  	self.call_c(_F_f32toa)                     // CALL_C   f32toa
   873  	self.Emit("ADDQ", _AX, _RL)                // ADDQ     AX, RL
   874  }
   875  
   876  func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
   877  	self.check_size(32)
   878  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)  // MOVQ   (SP.p), AX
   879  	self.Emit("MOVQ", jit.Imm(_FM_exp64), _CX) // MOVQ   $_FM_exp64, CX
   880  	self.Emit("ANDQ", _CX, _AX)                // ANDQ   CX, AX
   881  	self.Emit("XORQ", _CX, _AX)                // XORQ   CX, AX
   882  	self.Sjmp("JZ", _LB_error_nan_or_infinite) // JZ     _error_nan_or_infinite
   883  	self.save_c()                              // SAVE   $C_regs
   884  	self.rbuf_di()                             // MOVQ   RP, DI
   885  	self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0) // MOVSD  (SP.p), X0
   886  	self.call_c(_F_f64toa)                     // CALL_C f64toa
   887  	self.Emit("ADDQ", _AX, _RL)                // ADDQ   AX, RL
   888  }
   889  
   890  func (self *_Assembler) _asm_OP_str(_ *_Instr) {
   891  	self.encode_string(false)
   892  }
   893  
   894  func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
   895  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ 8(SP.p), AX
   896  	self.Emit("ADDQ", jit.Imm(2), _AX)              // ADDQ $2, AX
   897  	self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)       // MOVQ $_IM_mulv, CX
   898  	self.Emit("MOVQ", _DX, _BX)                     // MOVQ DX, BX
   899  	self.From("MULQ", _CX)                          // MULQ CX
   900  	self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
   901  	self.Emit("ORQ", jit.Imm(2), _AX)               // ORQ  $2, AX
   902  	self.Emit("MOVQ", _BX, _DX)                     // MOVQ BX, DX
   903  	self.check_size_r(_AX, 0)                       // SIZE AX
   904  	self.add_char('"')                              // CHAR $'"'
   905  	self.Emit("MOVQ", _ARG_rb, _DI)                 // MOVQ rb<>+0(FP), DI
   906  	self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))         // MOVQ SI, 8(DI)
   907  	self.Emit("MOVQ", _SP_p, _SI)                   // MOVQ SP.p, SI
   908  
   909  	/* check for AVX2 support */
   910  	if !cpu.HasAVX2 {
   911  		self.Emit("XORL", _DX, _DX) // XORL DX, DX
   912  	} else {
   913  		self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
   914  	}
   915  
   916  	/* call the encoder */
   917  	self.call_b64(_F_b64encode) // CALL b64encode
   918  	self.load_buffer_AX()       // LOAD {buf}
   919  	self.add_char('"')          // CHAR $'"'
   920  }
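
        // The MULQ above is a multiply-high by _IM_mulv (0xAAAAAAAAAAAAAAAB,
        // i.e. (2^65+1)/3), leaving DX as roughly floor(2*(len+2)/3); the
        // LEAQ/ORQ then form (2*DX+1)|2. This appears to be a conservative upper
        // bound on the quoted base64 output length 4*ceil(len/3)+2, used only
        // for the size check; the actual length is whatever b64encode appends.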
   921  
   922  func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
   923  	self.encode_string(true)
   924  }
   925  
   926  func (self *_Assembler) _asm_OP_number(_ *_Instr) {
   927  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ    8(SP.p), BX
   928  	self.Emit("TESTQ", _BX, _BX)              // TESTQ   BX, BX
   929  	self.Sjmp("JZ", "_empty_{n}")
   930  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ    (SP.p), AX
   931  	self.Emit("TESTQ", _AX, _AX)              // TESTQ   AX, AX
   932  	self.Sjmp("JNZ", "_number_next_{n}")
   933  	self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
   934  	self.Sjmp("JMP", _LB_panic)
   935  	self.Link("_number_next_{n}")
   936  	self.call_go(_F_isValidNumber)                  // CALL_GO isValidNumber
   937  	self.Emit("CMPB", _AX, jit.Imm(0))              // CMPB    AX, $0
   938  	self.Sjmp("JE", _LB_error_invalid_number)       // JE      _error_invalid_number
   939  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX)       // MOVQ    8(SP.p), BX
   940  	self.check_size_r(_BX, 0)                       // SIZE    BX
   941  	self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ    (RP)(RL), AX
   942  	self.Emit("ADDQ", jit.Ptr(_SP_p, 8), _RL)       // ADDQ    8(SP.p), RL
   943  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX)       // MOVQ    (SP.p), BX
   944  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX)       // MOVQ    8(SP.p), CX
   945  	self.call_go(_F_memmove)                        // CALL_GO memmove
   946  	self.Emit("MOVQ", _ARG_rb, _AX)                 // MOVQ rb<>+0(FP), AX
   947  	self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))         // MOVQ RL, 8(AX)
   948  	self.Sjmp("JMP", "_done_{n}")                   // JMP     _done_{n}
   949  	self.Link("_empty_{n}")                         // _empty_{n}
   950  	self.check_size(1)                              // SIZE    $1
   951  	self.add_char('0')                              // CHAR    $'0'
   952  	self.Link("_done_{n}")                          // _done_{n}:
   953  }
   954  
   955  func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
   956  	self.prep_buffer_AX()                     // MOVE  {buf}, AX
   957  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ  (SP.p), BX
   958  	self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ  8(SP.p), CX
   959  	self.Emit("MOVQ", _ST, _DI)               // MOVQ  ST, DI
   960  	self.Emit("MOVQ", _ARG_fv, _SI)           // MOVQ  fv, SI
   961  	self.call_encoder(_F_encodeTypedPointer)  // CALL  encodeTypedPointer
   962  	self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
   963  	self.Sjmp("JNZ", _LB_error)               // JNZ   _error
   964  	self.load_buffer_AX()
   965  }
   966  
   967  func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
   968  	self.prep_buffer_AX()                     // MOVE  {buf}, AX
   969  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ  (SP.p), CX
   970  	self.Emit("MOVQ", jit.Ptr(_CX, 8), _BX)   // MOVQ  8(CX), BX
   971  	self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ  8(SP.p), CX
   972  	self.Emit("MOVQ", _ST, _DI)               // MOVQ  ST, DI
   973  	self.Emit("MOVQ", _ARG_fv, _SI)           // MOVQ  fv, SI
   974  	self.call_encoder(_F_encodeTypedPointer)  // CALL  encodeTypedPointer
   975  	self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
   976  	self.Sjmp("JNZ", _LB_error)               // JNZ   _error
   977  	self.load_buffer_AX()
   978  }
   979  
   980  func (self *_Assembler) _asm_OP_byte(p *_Instr) {
   981  	self.check_size(1)
   982  	self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVB $p.i64(), (RP)(RL*1)
   983  	self.Emit("ADDQ", jit.Imm(1), _RL)                           // ADDQ $1, RL
   984  }
   985  
   986  func (self *_Assembler) _asm_OP_text(p *_Instr) {
   987  	self.check_size(len(p.vs())) // SIZE ${len(p.vs())}
   988  	self.add_text(p.vs())        // TEXT ${p.vs()}
   989  }
   990  
   991  func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
   992  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
   993  }
   994  
   995  func (self *_Assembler) _asm_OP_index(p *_Instr) {
   996  	self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX
   997  	self.Emit("ADDQ", _AX, _SP_p)            // ADDQ AX, SP.p
   998  }
   999  
  1000  func (self *_Assembler) _asm_OP_load(_ *_Instr) {
  1001  	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
  1002  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
  1003  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)  // MOVQ -8(ST)(AX), SP.p
  1004  	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)   // MOVQ (ST)(AX), SP.q
  1005  }
  1006  
  1007  func (self *_Assembler) _asm_OP_save(_ *_Instr) {
  1008  	self.save_state()
  1009  }
  1010  
  1011  func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
  1012  	self.drop_state(_StateSize)
  1013  }
  1014  
  1015  func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
  1016  	self.drop_state(_StateSize * 2)                   // DROP  $(_StateSize * 2)
  1017  	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
  1018  }
  1019  
  1020  func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
  1021  	self.prep_buffer_AX() // MOVE {buf}, AX
  1022  	vt, pv := p.vp()
  1023  	self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ $(type(p.vt())), BX
  1024  
  1025  	/* check for indirection */
  1026  	if !rt.UnpackType(vt).Indirect() {
  1027  		self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
  1028  	} else {
  1029  		self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, VAR.vp
  1030  		self.Emit("LEAQ", _VAR_vp, _CX)   // LEAQ VAR.vp, CX
  1031  	}
  1032  
  1033  	/* call the encoder */
  1034  	self.Emit("MOVQ", _ST, _DI)     // MOVQ  ST, DI
  1035  	self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ  fv, SI
  1036  	if pv {
  1037  		self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI) // BTCQ $1, SI
  1038  	}
  1039  	self.call_encoder(_F_encodeTypedPointer) // CALL  encodeTypedPointer
  1040  	self.Emit("TESTQ", _ET, _ET)             // TESTQ ET, ET
  1041  	self.Sjmp("JNZ", _LB_error)              // JNZ   _error
  1042  	self.load_buffer_AX()
  1043  }
  1044  
  1045  func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
  1046  	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
  1047  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1048  }
  1049  
  1050  func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
  1051  	self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
  1052  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1053  }
  1054  
  1055  func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
  1056  	self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
  1057  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1058  }
  1059  
  1060  func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
  1061  	self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
  1062  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1063  }
  1064  
  1065  func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
  1066  	self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
  1067  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1068  }
  1069  
  1070  func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
  1071  	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
  1072  	self.Xjmp("JE", p.vi())                          // JE   p.vi()
  1073  }
  1074  
  1075  func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
  1076  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)      // MOVQ  (SP.p), AX
  1077  	self.Emit("TESTQ", _AX, _AX)                   // TESTQ AX, AX
  1078  	self.Xjmp("JZ", p.vi())                        // JZ    p.vi()
  1079  	self.Emit("CMPQ", jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ  (AX), $0
  1080  	self.Xjmp("JE", p.vi())                        // JE    p.vi()
  1081  }
  1082  
  1083  func (self *_Assembler) _asm_OP_goto(p *_Instr) {
  1084  	self.Xjmp("JMP", p.vi())
  1085  }
  1086  
  1087  func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
  1088  	self.Emit("MOVQ", jit.Type(p.vt()), _AX)  // MOVQ    $p.vt(), AX
  1089  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ    (SP.p), BX
  1090  	self.Emit("MOVQ", _ARG_fv, _CX)           // MOVQ    fv, CX
  1091  	self.call_go(_F_iteratorStart)            // CALL_GO iteratorStart
  1092  	self.Emit("MOVQ", _AX, _SP_q)             // MOVQ    AX, SP.q
  1093  	self.Emit("MOVQ", _BX, _ET)               // MOVQ    BX, ET
  1094  	self.Emit("MOVQ", _CX, _EP)               // MOVQ    CX, EP
  1095  	self.Emit("TESTQ", _ET, _ET)              // TESTQ   ET, ET
  1096  	self.Sjmp("JNZ", _LB_error)               // JNZ     _error
  1097  }
  1098  
  1099  func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
  1100  	self.Emit("MOVQ", _SP_q, _AX)   // MOVQ    SP.q, AX
  1101  	self.call_go(_F_iteratorStop)   // CALL_GO iteratorStop
  1102  	self.Emit("XORL", _SP_q, _SP_q) // XORL    SP.q, SP.q
  1103  }
  1104  
  1105  func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
  1106  	self.Emit("MOVQ", jit.Ptr(_SP_q, 0), _SP_p) // MOVQ    (SP.q), SP.p
  1107  	self.Emit("TESTQ", _SP_p, _SP_p)            // TESTQ   SP.p, SP.p
  1108  	self.Xjmp("JZ", p.vi())                     // JZ      p.vi()
  1109  }
  1110  
  1111  func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
  1112  	self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
  1113  	self.Sjmp("JNC", "_unordered_key_{n}")             // JNC _unordered_key_{n}
  1114  	self.encode_string(false)                          // STR $false
  1115  	self.Xjmp("JMP", p.vi())                           // JMP ${p.vi()}
  1116  	self.Link("_unordered_key_{n}")                    // _unordered_key_{n}:
  1117  }
  1118  
  1119  func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
  1120  	self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ    8(SP.q), SP.p
  1121  	self.Emit("MOVQ", _SP_q, _AX)               // MOVQ    SP.q, AX
  1122  	self.call_go(_F_iteratorNext)               // CALL_GO iteratorNext
  1123  }
  1124  
  1125  func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
  1126  	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x)  // MOVQ  8(SP.p), SP.x
  1127  	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)  // MOVQ  (SP.p), SP.p
  1128  	self.Emit("ORQ", jit.Imm(1<<_S_init), _SP_f) // ORQ   $(1<<_S_init), SP.f
  1129  }
  1130  
  1131  func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
  1132  	self.Emit("TESTQ", _SP_x, _SP_x)                        // TESTQ   SP.x, SP.x
  1133  	self.Xjmp("JZ", p.vi())                                 // JZ      p.vi()
  1134  	self.Emit("SUBQ", jit.Imm(1), _SP_x)                    // SUBQ    $1, SP.x
  1135  	self.Emit("BTRQ", jit.Imm(_S_init), _SP_f)              // BTRQ    $_S_init, SP.f
  1136  	self.Emit("LEAQ", jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ    $(p.vlen())(SP.p), AX
  1137  	self.Emit("CMOVQCC", _AX, _SP_p)                        // CMOVQCC AX, SP.p
  1138  }
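
        // BTRQ tests and clears the _S_init bit, leaving its old value in CF: on
        // the first pass (flag set, CF=1) CMOVQCC does not move, so SP.p still
        // points at element 0; on later passes (CF=0) SP.p advances by the
        // element size p.vlen().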
  1139  
  1140  func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
  1141  	self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
  1142  }
  1143  
  1144  func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
  1145  	if p.vk() != reflect.Ptr {
  1146  		panic("marshal_p: invalid type")
  1147  	} else {
  1148  		self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
  1149  	}
  1150  }
  1151  
  1152  func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
  1153  	self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
  1154  }
  1155  
  1156  func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
  1157  	if p.vk() != reflect.Ptr {
  1158  		panic("marshal_text_p: invalid type")
  1159  	} else {
  1160  		self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
  1161  	}
  1162  }
  1163  
  1164  func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
  1165  	self.Emit("ORQ", jit.Imm(1<<_S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
  1166  }
  1167  
  1168  func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
  1169  	self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
  1170  	self.Xjmp("JC", p.vi())
  1171  }
  1172  
  1173  func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
  1174  	self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX) // MOVQ $(p2.op()), CX
  1175  	self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX) // MOVQ $(p1.op()), BX
  1176  	self.Emit("MOVQ", jit.Imm(int64(i)), _AX)       // MOVQ $(i), AX
  1177  	self.call_go(_F_println)
  1178  }