github.com/goshafaq/sonic@v0.0.0-20231026082336-871835fb94c6/internal/encoder/assembler_stkabi_amd64.go (about)

     1  //go:build go1.16 && !go1.17
     2  // +build go1.16,!go1.17
     3  
     4  /*
     5   * Copyright 2021 ByteDance Inc.
     6   *
     7   * Licensed under the Apache License, Version 2.0 (the "License");
     8   * you may not use this file except in compliance with the License.
     9   * You may obtain a copy of the License at
    10   *
    11   *     http://www.apache.org/licenses/LICENSE-2.0
    12   *
    13   * Unless required by applicable law or agreed to in writing, software
    14   * distributed under the License is distributed on an "AS IS" BASIS,
    15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    16   * See the License for the specific language governing permissions and
    17   * limitations under the License.
    18   */
    19  
    20  package encoder
    21  
    22  import (
    23  	"fmt"
    24  	"reflect"
    25  	"strconv"
    26  	"unsafe"
    27  
    28  	"github.com/goshafaq/sonic/internal/cpu"
    29  	"github.com/goshafaq/sonic/internal/jit"
    30  	"github.com/goshafaq/sonic/internal/native/types"
    31  	"github.com/twitchyliquid64/golang-asm/obj"
    32  	"github.com/twitchyliquid64/golang-asm/obj/x86"
    33  
    34  	"github.com/goshafaq/sonic/internal/native"
    35  	"github.com/goshafaq/sonic/internal/rt"
    36  )
    37  
    38  /** Register Allocations
    39   *
    40   *  State Registers:
    41   *
    42   *      %rbx : stack base
    43   *      %rdi : result pointer
    44   *      %rsi : result length
    45   *      %rdx : result capacity
    46   *      %r12 : sp->p
    47   *      %r13 : sp->q
    48   *      %r14 : sp->x
    49   *      %r15 : sp->f
    50   *
    51   *  Error Registers:
    52   *
    53   *      %r10 : error type register
    54   *      %r11 : error pointer register
    55   */
    56  
    57  /** Function Prototype & Stack Map
    58   *
    59   *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
    60   *
    61   *  buf    :   (FP)
    62   *  p      :  8(FP)
    63   *  sb     : 16(FP)
    64   *  fv     : 24(FP)
    65   *  err.vt : 32(FP)
    66   *  err.vp : 40(FP)
    67   */
    68  
/* condition-variable states used by _OP_cond_set / _OP_cond_testc
 * NOTE(review): exact semantics inferred from names — confirm against compiler.go */
const (
	_S_cond = iota
	_S_init
)

/* stack-frame layout of the generated encoder function */
const (
	_FP_args   = 48 // 48 bytes for passing arguments to this function
	_FP_fargs  = 64 // 64 bytes for passing arguments to other Go functions
	_FP_saves  = 64 // 64 bytes for saving the registers before CALL instructions
	_FP_locals = 24 // 24 bytes for local variables
)

/* derived frame offsets */
const (
	_FP_offs = _FP_fargs + _FP_saves + _FP_locals
	_FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
	_FP_base = _FP_size + 8 // 8 bytes for the return address
)

/* IEEE-754 exponent bit masks, used to detect NaN / ±Inf */
const (
	_FM_exp32 = 0x7f800000
	_FM_exp64 = 0x7ff0000000000000
)

/* little-endian immediate encodings of common JSON literal bytes */
const (
	_IM_null   = 0x6c6c756e // 'null'
	_IM_true   = 0x65757274 // 'true'
	_IM_fals   = 0x736c6166 // 'fals' ('false' without the 'e')
	_IM_open   = 0x00225c22 // '"\"∅'
	_IM_array  = 0x5d5b     // '[]'
	_IM_object = 0x7d7b     // '{}'
	_IM_mulv   = -0x5555555555555555
)

/* labels of the buffer-growing subroutine */
const (
	_LB_more_space        = "_more_space"
	_LB_more_space_return = "_more_space_return_"
)

/* labels of the error-handling subroutines */
const (
	_LB_error                 = "_error"
	_LB_error_too_deep        = "_error_too_deep"
	_LB_error_invalid_number  = "_error_invalid_number"
	_LB_error_nan_or_infinite = "_error_nan_or_infinite"
	_LB_panic                 = "_panic"
)
   114  
/* scratch registers */
var (
	_AX = jit.Reg("AX")
	_CX = jit.Reg("CX")
	_DX = jit.Reg("DX")
	_DI = jit.Reg("DI")
	_SI = jit.Reg("SI")
	_BP = jit.Reg("BP")
	_SP = jit.Reg("SP")
	_R8 = jit.Reg("R8")
)

/* vector registers (used for 16-byte zeroing / moves) */
var (
	_X0 = jit.Reg("X0")
	_Y0 = jit.Reg("Y0")
)

/* state registers: stack base and result buffer (see register-allocation
 * comment at the top of this file) */
var (
	_ST = jit.Reg("BX") // stack base
	_RP = jit.Reg("DI") // result pointer
	_RL = jit.Reg("SI") // result length
	_RC = jit.Reg("DX") // result capacity
)

/* link and error registers */
var (
	_LR  = jit.Reg("R9")
	_R10 = jit.Reg("R10") // used for gcWriterBarrier
	_ET  = jit.Reg("R10") // error type register
	_EP  = jit.Reg("R11") // error pointer register
)

/* current stack-frame (_State) registers */
var (
	_SP_p = jit.Reg("R12")
	_SP_q = jit.Reg("R13")
	_SP_x = jit.Reg("R14")
	_SP_f = jit.Reg("R15")
)

/* incoming arguments, per the function prototype comment above */
var (
	_ARG_rb = jit.Ptr(_SP, _FP_base)
	_ARG_vp = jit.Ptr(_SP, _FP_base+8)
	_ARG_sb = jit.Ptr(_SP, _FP_base+16)
	_ARG_fv = jit.Ptr(_SP, _FP_base+24)
)

/* return values (err.vt / err.vp) */
var (
	_RET_et = jit.Ptr(_SP, _FP_base+32)
	_RET_ep = jit.Ptr(_SP, _FP_base+40)
)

/* stack slots for local variables */
var (
	_VAR_sp = jit.Ptr(_SP, _FP_fargs+_FP_saves)
	_VAR_dn = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
	_VAR_vp = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
)

/* register sets saved/restored around the various kinds of calls */
var (
	_REG_ffi = []obj.Addr{_RP, _RL, _RC}
	_REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
	_REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
	_REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
)
   176  
// _Assembler translates a compiled _Program into machine code via the
// embedded jit.BaseAssembler.
type _Assembler struct {
	jit.BaseAssembler
	p    _Program // instruction sequence to assemble
	x    int      // counter for generating unique _more_space_return_ labels
	name string   // suffix of the generated function name ("encode_" + name)
}
   183  
   184  func newAssembler(p _Program) *_Assembler {
   185  	return new(_Assembler).Init(p)
   186  }
   187  
   188  /** Assembler Interface **/
   189  func (self *_Assembler) Load() _Encoder {
   190  	return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
   191  }
   192  
// Init stores the program and registers self.compile as the lazy
// assembly callback on the embedded BaseAssembler.
func (self *_Assembler) Init(p _Program) *_Assembler {
	self.p = p
	self.BaseAssembler.Init(self.compile)
	return self
}
   198  
// compile emits the whole function: prologue, one code sequence per
// instruction, the epilogue, and finally the shared builtin subroutines.
// The order matters: builtins are reached only via jumps from the body.
func (self *_Assembler) compile() {
	self.prologue()
	self.instrs()
	self.epilogue()
	self.builtins()
}
   205  
   206  /** Assembler Stages **/
   207  
/* _OpFuncTab maps every opcode to the method that emits its code;
 * nil entries are invalid opcodes (see instr). */
var _OpFuncTab = [256]func(*_Assembler, *_Instr){
	_OP_null:           (*_Assembler)._asm_OP_null,
	_OP_empty_arr:      (*_Assembler)._asm_OP_empty_arr,
	_OP_empty_obj:      (*_Assembler)._asm_OP_empty_obj,
	_OP_bool:           (*_Assembler)._asm_OP_bool,
	_OP_i8:             (*_Assembler)._asm_OP_i8,
	_OP_i16:            (*_Assembler)._asm_OP_i16,
	_OP_i32:            (*_Assembler)._asm_OP_i32,
	_OP_i64:            (*_Assembler)._asm_OP_i64,
	_OP_u8:             (*_Assembler)._asm_OP_u8,
	_OP_u16:            (*_Assembler)._asm_OP_u16,
	_OP_u32:            (*_Assembler)._asm_OP_u32,
	_OP_u64:            (*_Assembler)._asm_OP_u64,
	_OP_f32:            (*_Assembler)._asm_OP_f32,
	_OP_f64:            (*_Assembler)._asm_OP_f64,
	_OP_str:            (*_Assembler)._asm_OP_str,
	_OP_bin:            (*_Assembler)._asm_OP_bin,
	_OP_quote:          (*_Assembler)._asm_OP_quote,
	_OP_number:         (*_Assembler)._asm_OP_number,
	_OP_eface:          (*_Assembler)._asm_OP_eface,
	_OP_iface:          (*_Assembler)._asm_OP_iface,
	_OP_byte:           (*_Assembler)._asm_OP_byte,
	_OP_text:           (*_Assembler)._asm_OP_text,
	_OP_deref:          (*_Assembler)._asm_OP_deref,
	_OP_index:          (*_Assembler)._asm_OP_index,
	_OP_load:           (*_Assembler)._asm_OP_load,
	_OP_save:           (*_Assembler)._asm_OP_save,
	_OP_drop:           (*_Assembler)._asm_OP_drop,
	_OP_drop_2:         (*_Assembler)._asm_OP_drop_2,
	_OP_recurse:        (*_Assembler)._asm_OP_recurse,
	_OP_is_nil:         (*_Assembler)._asm_OP_is_nil,
	_OP_is_nil_p1:      (*_Assembler)._asm_OP_is_nil_p1,
	_OP_is_zero_1:      (*_Assembler)._asm_OP_is_zero_1,
	_OP_is_zero_2:      (*_Assembler)._asm_OP_is_zero_2,
	_OP_is_zero_4:      (*_Assembler)._asm_OP_is_zero_4,
	_OP_is_zero_8:      (*_Assembler)._asm_OP_is_zero_8,
	_OP_is_zero_map:    (*_Assembler)._asm_OP_is_zero_map,
	_OP_goto:           (*_Assembler)._asm_OP_goto,
	_OP_map_iter:       (*_Assembler)._asm_OP_map_iter,
	_OP_map_stop:       (*_Assembler)._asm_OP_map_stop,
	_OP_map_check_key:  (*_Assembler)._asm_OP_map_check_key,
	_OP_map_write_key:  (*_Assembler)._asm_OP_map_write_key,
	_OP_map_value_next: (*_Assembler)._asm_OP_map_value_next,
	_OP_slice_len:      (*_Assembler)._asm_OP_slice_len,
	_OP_slice_next:     (*_Assembler)._asm_OP_slice_next,
	_OP_marshal:        (*_Assembler)._asm_OP_marshal,
	_OP_marshal_p:      (*_Assembler)._asm_OP_marshal_p,
	_OP_marshal_text:   (*_Assembler)._asm_OP_marshal_text,
	_OP_marshal_text_p: (*_Assembler)._asm_OP_marshal_text_p,
	_OP_cond_set:       (*_Assembler)._asm_OP_cond_set,
	_OP_cond_testc:     (*_Assembler)._asm_OP_cond_testc,
}
   260  
   261  func (self *_Assembler) instr(v *_Instr) {
   262  	if fn := _OpFuncTab[v.op()]; fn != nil {
   263  		fn(self, v)
   264  	} else {
   265  		panic(fmt.Sprintf("invalid opcode: %d", v.op()))
   266  	}
   267  }
   268  
   269  func (self *_Assembler) instrs() {
   270  	for i, v := range self.p {
   271  		self.Mark(i)
   272  		self.instr(&v)
   273  		self.debug_instr(i, &v)
   274  	}
   275  }
   276  
// builtins appends the shared subroutines (buffer growth, error exits,
// panic shim) after the function body; they are entered only via jumps.
func (self *_Assembler) builtins() {
	self.more_space()
	self.error_too_deep()
	self.error_invalid_number()
	self.error_nan_or_infinite()
	self.go_panic()
}
   284  
// epilogue emits the success path (ET/EP cleared) falling into the shared
// _error exit, which writes back the buffer length, stores the error pair,
// restores BP and returns. All error paths jump to _LB_error with ET/EP set.
func (self *_Assembler) epilogue() {
	self.Mark(len(self.p))
	self.Emit("XORL", _ET, _ET)
	self.Emit("XORL", _EP, _EP)
	self.Link(_LB_error)
	self.Emit("MOVQ", _ARG_rb, _AX)                // MOVQ rb<>+0(FP), AX
	self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))        // MOVQ RL, 8(AX)
	self.Emit("MOVQ", _ET, _RET_et)                // MOVQ ET, et<>+24(FP)
	self.Emit("MOVQ", _EP, _RET_ep)                // MOVQ EP, ep<>+32(FP)
	self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
	self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
	self.Emit("RET")                               // RET
}
   298  
// prologue reserves the stack frame, saves BP, loads the output buffer
// into RP/RL/RC, and initializes the state registers (SP.p from the value
// pointer argument, ST from the stack base, SP.x/SP.f/SP.q zeroed).
func (self *_Assembler) prologue() {
	self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
	self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
	self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
	self.load_buffer()                             // LOAD {buf}
	self.Emit("MOVQ", _ARG_vp, _SP_p)              // MOVQ vp<>+8(FP), SP.p
	self.Emit("MOVQ", _ARG_sb, _ST)                // MOVQ sb<>+16(FP), ST
	self.Emit("XORL", _SP_x, _SP_x)                // XORL SP.x, SP.x
	self.Emit("XORL", _SP_f, _SP_f)                // XORL SP.f, SP.f
	self.Emit("XORL", _SP_q, _SP_q)                // XORL SP.q, SP.q
}
   310  
   311  /** Assembler Inline Functions **/
   312  
   313  func (self *_Assembler) xsave(reg ...obj.Addr) {
   314  	for i, v := range reg {
   315  		if i > _FP_saves/8-1 {
   316  			panic("too many registers to save")
   317  		} else {
   318  			self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
   319  		}
   320  	}
   321  }
   322  
   323  func (self *_Assembler) xload(reg ...obj.Addr) {
   324  	for i, v := range reg {
   325  		if i > _FP_saves/8-1 {
   326  			panic("too many registers to load")
   327  		} else {
   328  			self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
   329  		}
   330  	}
   331  }
   332  
   333  func (self *_Assembler) rbuf_di() {
   334  	if _RP.Reg != x86.REG_DI {
   335  		panic("register allocation messed up: RP != DI")
   336  	} else {
   337  		self.Emit("ADDQ", _RL, _RP)
   338  	}
   339  }
   340  
// store_int emits a call to a native itoa-style routine fn: it reserves nd
// bytes, points DI at the write position, loads the value via the given
// load instruction (e.g. MOVQ/MOVL) into SI, calls fn, then advances RL by
// the number of bytes written (returned in AX).
func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
	self.check_size(nd)
	self.save_c()                          // SAVE   $C_regs
	self.rbuf_di()                         // MOVQ   RP, DI
	self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins   (SP.p), SI
	self.call_c(fn)                        // CALL_C $fn
	self.Emit("ADDQ", _AX, _RL)            // ADDQ   AX, RL
}
   349  
   350  func (self *_Assembler) store_str(s string) {
   351  	i := 0
   352  	m := rt.Str2Mem(s)
   353  
   354  	/* 8-byte stores */
   355  	for i <= len(m)-8 {
   356  		self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)       // MOVQ $s[i:], AX
   357  		self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
   358  		i += 8
   359  	}
   360  
   361  	/* 4-byte stores */
   362  	if i <= len(m)-4 {
   363  		self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
   364  		i += 4
   365  	}
   366  
   367  	/* 2-byte stores */
   368  	if i <= len(m)-2 {
   369  		self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
   370  		i += 2
   371  	}
   372  
   373  	/* last byte */
   374  	if i < len(m) {
   375  		self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
   376  	}
   377  }
   378  
// check_size ensures the buffer has room for n more bytes (required
// length = RL + n).
func (self *_Assembler) check_size(n int) {
	self.check_size_rl(jit.Ptr(_RL, int64(n)))
}
   382  
// check_size_r ensures room for a register-sized amount: required
// length = RL + r + d.
func (self *_Assembler) check_size_r(r obj.Addr, d int) {
	self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}
   386  
// check_size_rl emits a capacity check: it computes the required length
// from the addressing expression v, compares it against RC, and if it
// exceeds capacity jumps into the _more_space subroutine with R9 (LR)
// holding the unique per-call-site return label.
func (self *_Assembler) check_size_rl(v obj.Addr) {
	idx := self.x
	key := _LB_more_space_return + strconv.Itoa(idx)

	/* the following code relies on LR == R9 to work */
	if _LR.Reg != x86.REG_R9 {
		panic("register allocation messed up: LR != R9")
	}

	/* check for buffer capacity */
	self.x++
	self.Emit("LEAQ", v, _AX)   // LEAQ $v, AX
	self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
	self.Sjmp("JBE", key)       // JBE  _more_space_return_{n}
	self.slice_grow_ax(key)     // GROW $key
	self.Link(key)              // _more_space_return_{n}:
}
   404  
// slice_grow_ax loads the return label's address into R9 (raw bytes encode
// `LEAQ ?(PC), R9`, with the 4-byte displacement patched by Sref) and
// jumps to _more_space, which grows the buffer and jumps back through R9.
// The required new length is expected in AX.
func (self *_Assembler) slice_grow_ax(ret string) {
	self.Byte(0x4c, 0x8d, 0x0d)      // LEAQ ?(PC), R9
	self.Sref(ret, 4)                // .... &ret
	self.Sjmp("JMP", _LB_more_space) // JMP  _more_space
}
   410  
   411  /** State Stack Helpers **/
   412  
/* size of one _State stack frame, and the byte offset at which the state
 * stack is considered full */
const (
	_StateSize  = int64(unsafe.Sizeof(_State{}))
	_StackLimit = _MaxStack * _StateSize
)
   417  
// save_state emits code that pushes the current state registers
// (SP.x, SP.f, SP.p, SP.q) onto the in-memory state stack at (ST),
// bumping the stack offset stored at (ST) and jumping to the too-deep
// error when the new offset reaches _StackLimit. Pointer slots go through
// WritePtr so the GC write barrier is honored.
func (self *_Assembler) save_state() {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)            // MOVQ (ST), CX
	self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8)   // LEAQ _StateSize(CX), R8
	self.Emit("CMPQ", _R8, jit.Imm(_StackLimit))       // CMPQ R8, $_StackLimit
	self.Sjmp("JAE", _LB_error_too_deep)               // JAE  _error_too_deep
	self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))  // MOVQ SP.x, 8(ST)(CX)
	self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
	self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))  // MOVQ SP.p, 24(ST)(CX)
	self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))  // MOVQ SP.q, 32(ST)(CX)
	self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0))            // MOVQ R8, (ST)
}
   429  
// drop_state emits code that pops decr bytes (one or two _State frames)
// off the state stack, reloads the state registers from the now-current
// frame, and zeroes the vacated slots so the GC does not see stale
// pointers.
func (self *_Assembler) drop_state(decr int64) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
	self.Emit("SUBQ", jit.Imm(decr), _AX)              // SUBQ  $decr, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _SP_x)  // MOVQ  8(ST)(AX), SP.x
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ  16(ST)(AX), SP.f
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ  24(ST)(AX), SP.p
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ  32(ST)(AX), SP.q
	self.Emit("PXOR", _X0, _X0)                        // PXOR  X0, X0
	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))   // MOVOU X0, 8(ST)(AX)
	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))  // MOVOU X0, 24(ST)(AX)
}
   442  
   443  /** Buffer Helpers **/
   444  
// add_char appends a single byte to the buffer and advances RL.
// Callers must have reserved space beforehand (check_size).
func (self *_Assembler) add_char(ch byte) {
	self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
	self.Emit("ADDQ", jit.Imm(1), _RL)                             // ADDQ $1, RL
}
   449  
// add_long stores 4 bytes (little-endian immediate ch) at the write
// position but advances RL by only n — used to write literals whose
// significant length is shorter than the store width (e.g. _IM_open).
func (self *_Assembler) add_long(ch uint32, n int64) {
	self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
	self.Emit("ADDQ", jit.Imm(n), _RL)                             // ADDQ $n, RL
}
   454  
// add_text appends the literal ss to the buffer and advances RL by its
// length. Callers must have reserved space beforehand.
func (self *_Assembler) add_text(ss string) {
	self.store_str(ss)                              // TEXT $ss
	self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
}
   459  
// prep_buffer syncs the current length RL back into the buffer header and
// places the *[]byte as the first stack argument for a Go-ABI0 call.
func (self *_Assembler) prep_buffer() {
	self.Emit("MOVQ", _ARG_rb, _AX)         // MOVQ rb<>+0(FP), AX
	self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
}
   465  
// prep_buffer_c syncs RL into the buffer header and loads the *[]byte
// into DI, the first argument register of the C calling convention.
func (self *_Assembler) prep_buffer_c() {
	self.Emit("MOVQ", _ARG_rb, _DI)         // MOVQ rb<>+0(FP), DI
	self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ RL, 8(DI)
}
   470  
// save_buffer writes the full buffer triple (RP, RL, RC) back into the
// *[]byte slice header so callees observe the current buffer state.
func (self *_Assembler) save_buffer() {
	self.Emit("MOVQ", _ARG_rb, _CX)          // MOVQ rb<>+0(FP), CX
	self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))  // MOVQ RP, (CX)
	self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))  // MOVQ RL, 8(CX)
	self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
}
   477  
// load_buffer (re)loads RP, RL and RC from the *[]byte slice header —
// the inverse of save_buffer; used after calls that may have grown the
// buffer.
func (self *_Assembler) load_buffer() {
	self.Emit("MOVQ", _ARG_rb, _AX)          // MOVQ rb<>+0(FP), AX
	self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)  // MOVQ (AX), RP
	self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)  // MOVQ 8(AX), RL
	self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
}
   484  
   485  /** Function Interface Helpers **/
   486  
// call emits an indirect call to the absolute address pc through AX.
func (self *_Assembler) call(pc obj.Addr) {
	self.Emit("MOVQ", pc, _AX) // MOVQ $pc, AX
	self.Rjmp("CALL", _AX)     // CALL AX
}
   491  
// save_c spills the registers clobbered by C-convention calls (RP/RL/RC).
func (self *_Assembler) save_c() {
	self.xsave(_REG_ffi...) // SAVE $REG_ffi
}
   495  
// call_c calls a C-convention native routine and restores the registers
// saved by save_c (which the caller must have emitted beforehand).
func (self *_Assembler) call_c(pc obj.Addr) {
	self.call(pc)           // CALL $pc
	self.xload(_REG_ffi...) // LOAD $REG_ffi
}
   500  
// call_go calls a Go function, saving and restoring every live state and
// buffer register around the call.
func (self *_Assembler) call_go(pc obj.Addr) {
	self.xsave(_REG_all...) // SAVE $REG_all
	self.call(pc)           // CALL $pc
	self.xload(_REG_all...) // LOAD $REG_all
}
   506  
// call_encoder calls another generated encoder: it preserves the encoder
// state registers and reloads the buffer triple afterwards, since the
// callee may have reallocated the buffer.
func (self *_Assembler) call_encoder(pc obj.Addr) {
	self.xsave(_REG_enc...) // SAVE $REG_enc
	self.call(pc)           // CALL $pc
	self.xload(_REG_enc...) // LOAD $REG_enc
	self.load_buffer()      // LOAD {buf}
}
   513  
   514  func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
   515  	switch vt.Kind() {
   516  	case reflect.Interface:
   517  		self.call_marshaler_i(fn, it)
   518  	case reflect.Ptr, reflect.Map:
   519  		self.call_marshaler_v(fn, it, vt, true)
   520  	// struct/array of 1 direct iface type can be direct
   521  	default:
   522  		self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
   523  	}
   524  }
   525  
// call_marshaler_i emits the marshaler call for an interface value: it
// loads the iface pair from (SP.p), writes 'null' when the itab is nil,
// otherwise asserts the value to the target interface type (assertI2I),
// calls the encoder fn with the buffer and flags, and routes any returned
// error to the shared _error exit.
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
	self.Emit("MOVQ", jit.Gtype(it), _AX)                         // MOVQ    $it, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                       // MOVQ    AX, (SP)
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)                     // MOVQ    (SP.p), AX
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX)                     // MOVQ    8(SP.p), CX
	self.Emit("TESTQ", _AX, _AX)                                  // TESTQ   AX, AX
	self.Sjmp("JZ", "_null_{n}")                                  // JZ      _null_{n}
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))                       // MOVQ    AX, 8(SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 16))                      // MOVQ    CX, 16(SP)
	self.call_go(_F_assertI2I)                                    // CALL_GO assertI2I
	self.prep_buffer()                                            // MOVE    {buf}, (SP)
	self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0)                     // MOVOU   24(SP), X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                      // MOVOU   X0, 8(SP)
	self.Emit("MOVQ", _ARG_fv, _CX)                               // MOVQ   ARG.fv, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))                      // MOVQ   CX, 24(SP)
	self.call_encoder(fn)                                         // CALL    $fn
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)                      // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)                      // MOVQ    40(SP), EP
	self.Emit("TESTQ", _ET, _ET)                                  // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)                                   // JNZ     _error
	self.Sjmp("JMP", "_done_{n}")                                 // JMP     _done_{n}
	self.Link("_null_{n}")                                        // _null_{n}:
	self.check_size(4)                                            // SIZE    $4
	self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL    $'null', (RP)(RL*1)
	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ    $4, RL
	self.Link("_done_{n}")                                        // _done_{n}:
}
   553  
// call_marshaler_v emits the marshaler call for a concrete value: it
// builds an iface from the static itab(it, vt) plus either SP.p itself or
// the pointer it references (deref), calls the encoder fn with the buffer
// and flags, and routes any returned error to the shared _error exit.
func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
	self.prep_buffer()                       // MOVE {buf}, (SP)
	self.Emit("MOVQ", jit.Itab(it, vt), _AX) // MOVQ $(itab(it, vt)), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))  // MOVQ AX, 8(SP)

	/* dereference the pointer if needed */
	if !deref {
		self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16)) // MOVQ SP.p, 16(SP)
	} else {
		self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
		self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))  // MOVQ AX, 16(SP)
	}

	/* call the encoder, and perform error checks */
	self.Emit("MOVQ", _ARG_fv, _CX)          // MOVQ   ARG.fv, CX
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ   CX, 24(SP)
	self.call_encoder(fn)                    // CALL  $fn
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ  32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ  40(SP), EP
	self.Emit("TESTQ", _ET, _ET)             // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_error)              // JNZ   _error
}
   576  
   577  /** Builtin: _more_space **/
   578  
/* runtime type descriptor for byte and the runtime.growslice entry,
 * used by the _more_space subroutine */
var (
	_T_byte      = jit.Type(byteType)
	_F_growslice = jit.Func(growslice)
)
   583  
// more_space emits the shared buffer-growing subroutine: it calls
// runtime.growslice with (type, RP, RL, RC, required-cap-in-AX) on the
// stack, reloads the new slice triple from the return slots, writes it
// back to the *[]byte header, and jumps back through LR (set up by
// slice_grow_ax at each call site).
func (self *_Assembler) more_space() {
	self.Link(_LB_more_space)
	self.Emit("MOVQ", _T_byte, _AX)          // MOVQ $_T_byte, _AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))  // MOVQ _AX, (SP)
	self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8))  // MOVQ RP, 8(SP)
	self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP)
	self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP)
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
	self.xsave(_REG_jsr...)                  // SAVE $REG_jsr
	self.call(_F_growslice)                  // CALL $pc
	self.xload(_REG_jsr...)                  // LOAD $REG_jsr
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP) // MOVQ 40(SP), RP
	self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL) // MOVQ 48(SP), RL
	self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC) // MOVQ 56(SP), RC
	self.save_buffer()                       // SAVE {buf}
	self.Rjmp("JMP", _LR)                    // JMP  LR
}
   601  
   602  /** Builtin Errors **/
   603  
/* immediate addresses of the preallocated error values, and the itab for
 * returning them as a json.UnsupportedValueError through the error interface */
var (
	_V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
	_V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
	_I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)
   609  
// error_too_deep emits the exit taken when the state stack overflows:
// it loads the preallocated "too deep" error pair and jumps to _error.
func (self *_Assembler) error_too_deep() {
	self.Link(_LB_error_too_deep)
	self.Emit("MOVQ", _V_ERR_too_deep, _EP)               // MOVQ $_V_ERR_too_deep, EP
	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
	self.Sjmp("JMP", _LB_error)                           // JMP  _error
}
   616  
// error_invalid_number emits the exit for malformed json.Number values:
// it calls the Go helper error_number to build the error pair, loads the
// result from the stack, and jumps to _error.
func (self *_Assembler) error_invalid_number() {
	self.Link(_LB_error_invalid_number)
	self.call_go(_F_error_number)            // CALL_GO error_number
	self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ    16(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ    24(SP), EP
	self.Sjmp("JMP", _LB_error)              // JMP     _error
}
   624  
// error_nan_or_infinite emits the exit for NaN / ±Inf float values:
// it loads the preallocated error pair and jumps to _error.
func (self *_Assembler) error_nan_or_infinite() {
	self.Link(_LB_error_nan_or_infinite)
	self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)        // MOVQ $_V_ERR_nan_or_infinite, EP
	self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
	self.Sjmp("JMP", _LB_error)                           // JMP  _error
}
   631  
   632  /** String Encoding Routine **/
   633  
/* native string quoter entry and the Go panic helper */
var (
	_F_quote = jit.Imm(int64(native.S_quote))
	_F_panic = jit.Func(goPanic)
)
   638  
// go_panic emits the shared panic trampoline: call sites store the panic
// code at (SP) before jumping here; SP.p is passed as the second argument.
// goPanic panics, so this subroutine never returns.
func (self *_Assembler) go_panic() {
	self.Link(_LB_panic)
	self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
	self.call_go(_F_panic)
}
   644  
// encode_string emits the code that JSON-quotes the Go string at (SP.p)
// and appends it to the buffer. With doubleQuote set, the output is
// additionally escaped one more level (types.F_DOUBLE_UNQUOTE; opening
// sequence `"\"`, closing `\""`) — presumably for `string`-tagged fields;
// confirm against the compiler. A non-empty string with a nil data
// pointer panics via the _panic trampoline.
func (self *_Assembler) encode_string(doubleQuote bool) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ  8(SP.p), AX
	self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
	self.Sjmp("JZ", "_str_empty_{n}")         // JZ    _str_empty_{n}
	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
	self.Sjmp("JNE", "_str_next_{n}")
	self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
	self.Sjmp("JMP", _LB_panic)
	self.Link("_str_next_{n}")

	/* opening quote, check for double quote */
	if !doubleQuote {
		self.check_size_r(_AX, 2) // SIZE $2
		self.add_char('"')        // CHAR $'"'
	} else {
		self.check_size_r(_AX, 6)  // SIZE $6
		self.add_long(_IM_open, 3) // TEXT $`"\"`
	}

	/* quoting loop: sp tracks how much of the source has been consumed */
	self.Emit("XORL", _AX, _AX)     // XORL AX, AX
	self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
	self.Link("_str_loop_{n}")      // _str_loop_{n}:
	self.save_c()                   // SAVE $REG_ffi

	/* load the output buffer first, and then input buffer,
	 * because the parameter registers collide with RP / RL / RC */
	self.Emit("MOVQ", _RC, _CX)                     // MOVQ RC, CX
	self.Emit("SUBQ", _RL, _CX)                     // SUBQ RL, CX
	self.Emit("MOVQ", _CX, _VAR_dn)                 // MOVQ CX, dn
	self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
	self.Emit("LEAQ", _VAR_dn, _CX)                 // LEAQ dn, CX
	self.Emit("MOVQ", _VAR_sp, _AX)                 // MOVQ sp, AX
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)       // MOVQ (SP.p), DI
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)       // MOVQ 8(SP.p), SI
	self.Emit("ADDQ", _AX, _DI)                     // ADDQ AX, DI
	self.Emit("SUBQ", _AX, _SI)                     // SUBQ AX, SI

	/* set the flags based on `doubleQuote` */
	if !doubleQuote {
		self.Emit("XORL", _R8, _R8) // XORL R8, R8
	} else {
		self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
	}

	/* call the native quoter; a negative AX means the output ran out of room */
	self.call_c(_F_quote)             // CALL  quote
	self.Emit("ADDQ", _VAR_dn, _RL)   // ADDQ  dn, RL
	self.Emit("TESTQ", _AX, _AX)      // TESTQ AX, AX
	self.Sjmp("JS", "_str_space_{n}") // JS    _str_space_{n}

	/* close the string, check for double quote */
	if !doubleQuote {
		self.check_size(1)               // SIZE $1
		self.add_char('"')               // CHAR $'"'
		self.Sjmp("JMP", "_str_end_{n}") // JMP  _str_end_{n}
	} else {
		self.check_size(3)               // SIZE $3
		self.add_text("\\\"\"")          // TEXT $'\""'
		self.Sjmp("JMP", "_str_end_{n}") // JMP  _str_end_{n}
	}

	/* not enough space to contain the quoted string: record consumed
	 * input (AX is the bitwise-NOT of the consumed count), double the
	 * capacity and retry the loop */
	self.Link("_str_space_{n}")                     // _str_space_{n}:
	self.Emit("NOTQ", _AX)                          // NOTQ AX
	self.Emit("ADDQ", _AX, _VAR_sp)                 // ADDQ AX, sp
	self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
	self.slice_grow_ax("_str_loop_{n}")             // GROW _str_loop_{n}

	/* empty string, check for double quote */
	if !doubleQuote {
		self.Link("_str_empty_{n}") // _str_empty_{n}:
		self.check_size(2)          // SIZE $2
		self.add_text("\"\"")       // TEXT $'""'
		self.Link("_str_end_{n}")   // _str_end_{n}:
	} else {
		self.Link("_str_empty_{n}")   // _str_empty_{n}:
		self.check_size(6)            // SIZE $6
		self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
		self.Link("_str_end_{n}")     // _str_end_{n}:
	}
}
   727  
   728  /** OpCode Assembler Functions **/
   729  
/* unpacked runtime types of the marshaler interfaces */
var (
	_T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
	_T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

/* entry points of the native number formatters and the base64 encoder */
var (
	_F_f64toa    = jit.Imm(int64(native.S_f64toa))
	_F_f32toa    = jit.Imm(int64(native.S_f32toa))
	_F_i64toa    = jit.Imm(int64(native.S_i64toa))
	_F_u64toa    = jit.Imm(int64(native.S_u64toa))
	_F_b64encode = jit.Imm(int64(_subr__b64encode))
)

/* Go helper functions called from generated code */
var (
	_F_memmove       = jit.Func(memmove)
	_F_error_number  = jit.Func(error_number)
	_F_isValidNumber = jit.Func(isValidNumber)
)

/* map iteration helpers */
var (
	_F_iteratorStop  = jit.Func(iteratorStop)
	_F_iteratorNext  = jit.Func(iteratorNext)
	_F_iteratorStart = jit.Func(iteratorStart)
)

/* resolved in init() to avoid an initialization cycle */
var (
	_F_encodeTypedPointer  obj.Addr
	_F_encodeJsonMarshaler obj.Addr
	_F_encodeTextMarshaler obj.Addr
)

/* CPU feature bit — NOTE(review): usage not visible in this chunk */
const (
	_MODE_AVX2 = 1 << 2
)
   764  
// init resolves the encoder entry points into jit addresses; see the comment
// on the corresponding var block for why this is deferred to init().
func init() {
	_F_encodeTypedPointer = jit.Func(encodeTypedPointer)
	_F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
	_F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}
   770  
// _asm_OP_null writes the 4-byte literal _IM_null ("null") at the current
// output position and advances the result length by 4.
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
	self.check_size(4)
	self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
}
   776  
// _asm_OP_empty_arr encodes a nil slice: "null" by default, or the 2-byte
// _IM_array literal when the bitNoNullSliceOrMap flag is set in fv.
func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
	self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
	self.Sjmp("JC", "_empty_arr_{n}")
	self._asm_OP_null(nil)
	self.Sjmp("JMP", "_empty_arr_end_{n}")
	self.Link("_empty_arr_{n}")
	self.check_size(2)
	self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
	self.Emit("ADDQ", jit.Imm(2), _RL)
	self.Link("_empty_arr_end_{n}")
}
   788  
// _asm_OP_empty_obj encodes a nil map: "null" by default, or the 2-byte
// _IM_object literal when the bitNoNullSliceOrMap flag is set in fv.
func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
	self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
	self.Sjmp("JC", "_empty_obj_{n}")
	self._asm_OP_null(nil)
	self.Sjmp("JMP", "_empty_obj_end_{n}")
	self.Link("_empty_obj_{n}")
	self.check_size(2)
	self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
	self.Emit("ADDQ", jit.Imm(2), _RL)
	self.Link("_empty_obj_end_{n}")
}
   800  
// _asm_OP_bool writes "true" (4 bytes) or "false" (4-byte _IM_fals plus a
// trailing 'e') depending on the byte at (SP.p).
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
	self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))              // CMPB (SP.p), $0
	self.Sjmp("JE", "_false_{n}")                                 // JE   _false_{n}
	self.check_size(4)                                            // SIZE $4
	self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
	self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
	self.Sjmp("JMP", "_end_{n}")                                  // JMP  _end_{n}
	self.Link("_false_{n}")                                       // _false_{n}:
	self.check_size(5)                                            // SIZE $5
	self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
	self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))      // MOVB $'e', 4(RP)(RL*1)
	self.Emit("ADDQ", jit.Imm(5), _RL)                            // ADDQ $5, RL
	self.Link("_end_{n}")                                         // _end_{n}:
}
   815  
// _asm_OP_i8 encodes an int8: sign-extend the byte (MOVBQSX) and format via
// the native i64toa, reserving 4 output bytes.
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
	self.store_int(4, _F_i64toa, "MOVBQSX")
}
   819  
// _asm_OP_i16 encodes an int16: sign-extend the word (MOVWQSX) and format via
// the native i64toa, reserving 6 output bytes.
func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
	self.store_int(6, _F_i64toa, "MOVWQSX")
}
   823  
// _asm_OP_i32 encodes an int32 via i64toa after MOVLQSX sign extension.
// 17 bytes are reserved, more than the 11-char decimal maximum — presumably
// slack for the native routine's wider writes; TODO confirm.
func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
	self.store_int(17, _F_i64toa, "MOVLQSX")
}
   827  
// _asm_OP_i64 encodes an int64 via the native i64toa, reserving 21 output
// bytes (max decimal form "-9223372036854775808" is 20 chars).
func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
	self.store_int(21, _F_i64toa, "MOVQ")
}
   831  
// _asm_OP_u8 encodes a uint8: zero-extend the byte (MOVBQZX) and format via
// the native u64toa, reserving 3 output bytes.
func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
	self.store_int(3, _F_u64toa, "MOVBQZX")
}
   835  
// _asm_OP_u16 encodes a uint16: zero-extend the word (MOVWQZX) and format via
// the native u64toa, reserving 5 output bytes.
func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
	self.store_int(5, _F_u64toa, "MOVWQZX")
}
   839  
// _asm_OP_u32 encodes a uint32 via u64toa after MOVLQZX zero extension.
// 16 bytes are reserved, more than the 10-char decimal maximum — presumably
// slack for the native routine's wider writes; TODO confirm.
func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
	self.store_int(16, _F_u64toa, "MOVLQZX")
}
   843  
// _asm_OP_u64 encodes a uint64 via the native u64toa, reserving 20 output
// bytes (max decimal form "18446744073709551615").
func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
	self.store_int(20, _F_u64toa, "MOVQ")
}
   847  
// _asm_OP_f32 encodes a float32. The AND/XOR pair zeroes AX exactly when all
// exponent bits are set (NaN or ±Inf), which routes to the nan/inf error;
// otherwise the native f32toa writes into the buffer and returns the length.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
	self.check_size(32)
	self.Emit("MOVL", jit.Ptr(_SP_p, 0), _AX)  // MOVL     (SP.p), AX
	self.Emit("ANDL", jit.Imm(_FM_exp32), _AX) // ANDL     $_FM_exp32, AX
	self.Emit("XORL", jit.Imm(_FM_exp32), _AX) // XORL     $_FM_exp32, AX
	self.Sjmp("JZ", _LB_error_nan_or_infinite) // JZ       _error_nan_or_infinite
	self.save_c()                              // SAVE     $C_regs
	self.rbuf_di()                             // MOVQ     RP, DI
	self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0) // MOVSS    (SP.p), X0
	self.call_c(_F_f32toa)                     // CALL_C   f32toa
	self.Emit("ADDQ", _AX, _RL)                // ADDQ     AX, RL
}
   860  
// _asm_OP_f64 encodes a float64. Same NaN/Inf rejection scheme as
// _asm_OP_f32, but the 64-bit exponent mask must be staged in CX first
// since it does not fit a 32-bit immediate.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
	self.check_size(32)
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)  // MOVQ   (SP.p), AX
	self.Emit("MOVQ", jit.Imm(_FM_exp64), _CX) // MOVQ   $_FM_exp64, CX
	self.Emit("ANDQ", _CX, _AX)                // ANDQ   CX, AX
	self.Emit("XORQ", _CX, _AX)                // XORQ   CX, AX
	self.Sjmp("JZ", _LB_error_nan_or_infinite) // JZ     _error_nan_or_infinite
	self.save_c()                              // SAVE   $C_regs
	self.rbuf_di()                             // MOVQ   RP, DI
	self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0) // MOVSD  (SP.p), X0
	self.call_c(_F_f64toa)                     // CALL_C f64toa
	self.Emit("ADDQ", _AX, _RL)                // ADDQ   AX, RL
}
   874  
// _asm_OP_str encodes a plain string (no extra quote-escaping pass).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
	self.encode_string(false)
}
   878  
// _asm_OP_bin encodes a []byte as a quoted base64 string. The MULQ/LEAQ/ORQ
// sequence derives an upper bound on the encoded length from the input length
// via a fixed-point multiply by _IM_mulv (DX is preserved in R8 around the
// MULQ, which clobbers DX:AX). DX then selects the AVX2 mode flag for the
// native b64encode call.
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ 8(SP.p), AX
	self.Emit("ADDQ", jit.Imm(2), _AX)              // ADDQ $2, AX
	self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)       // MOVQ $_IM_mulv, CX
	self.Emit("MOVQ", _DX, _R8)                     // MOVQ DX, R8
	self.From("MULQ", _CX)                          // MULQ CX
	self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
	self.Emit("ORQ", jit.Imm(2), _AX)               // ORQ  $2, AX
	self.Emit("MOVQ", _R8, _DX)                     // MOVQ R8, DX
	self.check_size_r(_AX, 0)                       // SIZE AX
	self.add_char('"')                              // CHAR $'"'
	self.save_c()                                   // SAVE $REG_ffi
	self.prep_buffer_c()                            // MOVE {buf}, DI
	self.Emit("MOVQ", _SP_p, _SI)                   // MOVQ SP.p, SI
	/* check for AVX2 support */
	if !cpu.HasAVX2 {
		self.Emit("XORL", _DX, _DX) // XORL DX, DX
	} else {
		self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
	}

	/* call the encoder */
	self.call_c(_F_b64encode) // CALL b64encode
	self.load_buffer()        // LOAD {buf}
	self.add_char('"')        // CHAR $'"'
}
   906  
// _asm_OP_quote encodes a string with an extra level of quote-escaping
// (doubleQuote == true in encode_string).
func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
	self.encode_string(true)
}
   910  
// _asm_OP_number encodes a json.Number string value: an empty string encodes
// as "0"; a nil data pointer with a non-zero length panics; otherwise the
// text is validated by isValidNumber and memmove'd verbatim into the buffer.
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ    8(SP.p), CX
	self.Emit("TESTQ", _CX, _CX)              // TESTQ   CX, CX
	self.Sjmp("JZ", "_empty_{n}")             // JZ      _empty_{n}
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ    (SP.p), AX
	self.Emit("TESTQ", _AX, _AX)              // TESTQ   AX, AX
	self.Sjmp("JNZ", "_number_next_{n}")
	self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
	self.Sjmp("JMP", _LB_panic)
	self.Link("_number_next_{n}")
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))         // MOVQ    AX, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))         // MOVQ    CX, 8(SP)
	self.call_go(_F_isValidNumber)                  // CALL_GO isValidNumber
	self.Emit("CMPB", jit.Ptr(_SP, 16), jit.Imm(0)) // CMPB    16(SP), $0
	self.Sjmp("JE", _LB_error_invalid_number)       // JE      _error_invalid_number
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ    8(SP.p), AX
	self.check_size_r(_AX, 0)                       // SIZE    AX
	self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ    (RP)(RL), AX
	self.Emit("ADDQ", jit.Ptr(_SP_p, 8), _RL)       // ADDQ    8(SP.p), RL
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))         // MOVQ    AX, (SP)
	self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0)      // MOVOU   (SP.p), X0
	self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))        // MOVOU   X0, 8(SP)
	self.call_go(_F_memmove)                        // CALL_GO memmove
	self.Sjmp("JMP", "_done_{n}")                   // JMP     _done_{n}
	self.Link("_empty_{n}")                         // _empty_{n}:
	self.check_size(1)                              // SIZE    $1
	self.add_char('0')                              // CHAR    $'0'
	self.Link("_done_{n}")                          // _done_{n}:
}
   940  
// _asm_OP_eface encodes an empty interface (eface): the type word at (SP.p)
// and the address of the data word are passed to encodeTypedPointer along
// with the state stack and flags; a non-nil returned error aborts via _LB_error.
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
	self.prep_buffer()                        // MOVE  {buf}, (SP)
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ  (SP.p), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))   // MOVQ  AX, 8(SP)
	self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _AX) // LEAQ  8(SP.p), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))  // MOVQ  AX, 16(SP)
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24))  // MOVQ  ST, 24(SP)
	self.Emit("MOVQ", _ARG_fv, _AX)           // MOVQ  fv, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))  // MOVQ  AX, 32(SP)
	self.call_encoder(_F_encodeTypedPointer)  // CALL  encodeTypedPointer
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET)  // MOVQ  40(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP)  // MOVQ  48(SP), EP
	self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_error)               // JNZ   _error
}
   956  
// _asm_OP_iface encodes a non-empty interface (iface): like _asm_OP_eface,
// except the concrete type is loaded from the itab (8(AX)) rather than being
// the interface's first word directly.
func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
	self.prep_buffer()                        // MOVE  {buf}, (SP)
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ  (SP.p), AX
	self.Emit("MOVQ", jit.Ptr(_AX, 8), _AX)   // MOVQ  8(AX), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))   // MOVQ  AX, 8(SP)
	self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _AX) // LEAQ  8(SP.p), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))  // MOVQ  AX, 16(SP)
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24))  // MOVQ  ST, 24(SP)
	self.Emit("MOVQ", _ARG_fv, _AX)           // MOVQ  fv, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))  // MOVQ  AX, 32(SP)
	self.call_encoder(_F_encodeTypedPointer)  // CALL  encodeTypedPointer
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET)  // MOVQ  40(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP)  // MOVQ  48(SP), EP
	self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_error)               // JNZ   _error
}
   973  
// _asm_OP_byte appends the single literal byte carried in the instruction's
// immediate operand.
func (self *_Assembler) _asm_OP_byte(p *_Instr) {
	self.check_size(1)
	self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVB p.i64(), (RP)(RL*1)
	self.Emit("ADDQ", jit.Imm(1), _RL)                           // ADDQ $1, RL
}
   979  
// _asm_OP_text appends the literal text carried in the instruction.
func (self *_Assembler) _asm_OP_text(p *_Instr) {
	self.check_size(len(p.vs())) // SIZE ${len(p.vs())}
	self.add_text(p.vs())        // TEXT ${p.vs()}
}
   984  
// _asm_OP_deref replaces the current value pointer with the pointer it points to.
func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
}
   988  
// _asm_OP_index advances the value pointer by the instruction's immediate
// offset (e.g. stepping to a struct field).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
	self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX
	self.Emit("ADDQ", _AX, _SP_p)            // ADDQ AX, SP.p
}
   993  
// _asm_OP_load restores SP.x, SP.p and SP.q from the top entry of the state
// stack (the current offset is kept at (ST); fields live at negative offsets
// relative to ST+offset).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)  // MOVQ -8(ST)(AX), SP.p
	self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)   // MOVQ (ST)(AX), SP.q
}
  1000  
// _asm_OP_save pushes the current state registers onto the state stack.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
	self.save_state()
}
  1004  
// _asm_OP_drop pops one state entry off the state stack.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
	self.drop_state(_StateSize)
}
  1008  
// _asm_OP_drop_2 pops two state entries at once and zeroes part of the freed
// region (X0 is presumably zero here, set up by drop_state — TODO confirm).
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
	self.drop_state(_StateSize * 2)                   // DROP  $(_StateSize * 2)
	self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
}
  1013  
// _asm_OP_recurse encodes a (possibly self-referential) type by calling back
// into encodeTypedPointer with the instruction's type descriptor. For
// indirect types the value pointer is spilled to _VAR_vp and its address is
// passed instead; the pv flag toggles the pointer-value bit in fv via BTCQ.
// A non-nil returned error aborts via _LB_error.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
	self.prep_buffer() // MOVE {buf}, (SP)
	vt, pv := p.vp()
	self.Emit("MOVQ", jit.Type(vt), _AX)    // MOVQ $(type(p.vt())), AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)

	/* check for indirection */
	if !rt.UnpackType(vt).Indirect() {
		self.Emit("MOVQ", _SP_p, _AX) // MOVQ SP.p, AX
	} else {
		self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, 48(SP)
		self.Emit("LEAQ", _VAR_vp, _AX)   // LEAQ 48(SP), AX
	}

	/* call the encoder */
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ  AX, 16(SP)
	self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24)) // MOVQ  ST, 24(SP)
	self.Emit("MOVQ", _ARG_fv, _AX)          // MOVQ  fv, AX
	if pv {
		self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX) // BTCQ $1, AX
	}
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ  AX, 32(SP)
	self.call_encoder(_F_encodeTypedPointer) // CALL  encodeTypedPointer
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET) // MOVQ  40(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP) // MOVQ  48(SP), EP
	self.Emit("TESTQ", _ET, _ET)             // TESTQ ET, ET
	self.Sjmp("JNZ", _LB_error)              // JNZ   _error
}
  1042  
// _asm_OP_is_nil branches to instruction p.vi() when the pointer at (SP.p) is nil.
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1047  
// _asm_OP_is_nil_p1 branches to p.vi() when the second word (8(SP.p)) is zero.
func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
	self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1052  
// _asm_OP_is_zero_1 branches to p.vi() when the 1-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
	self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1057  
// _asm_OP_is_zero_2 branches to p.vi() when the 2-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
	self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1062  
// _asm_OP_is_zero_4 branches to p.vi() when the 4-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
	self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1067  
// _asm_OP_is_zero_8 branches to p.vi() when the 8-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
	self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
	self.Xjmp("JE", p.vi())                          // JE   p.vi()
}
  1072  
// _asm_OP_is_zero_map branches to p.vi() when the map is nil, or non-nil but
// with a zero first word in its header (its count field — TODO confirm layout).
func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)      // MOVQ  (SP.p), AX
	self.Emit("TESTQ", _AX, _AX)                   // TESTQ AX, AX
	self.Xjmp("JZ", p.vi())                        // JZ    p.vi()
	self.Emit("CMPQ", jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ  (AX), $0
	self.Xjmp("JE", p.vi())                        // JE    p.vi()
}
  1080  
// _asm_OP_goto unconditionally jumps to instruction p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
	self.Xjmp("JMP", p.vi())
}
  1084  
// _asm_OP_map_iter starts a map iteration: iteratorStart(type, map, fv)
// returns the iterator in SP.q plus an error pair; a non-nil error aborts
// via _LB_error.
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
	self.Emit("MOVQ", jit.Type(p.vt()), _AX)   // MOVQ    $p.vt(), AX
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)  // MOVQ    (SP.p), CX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))    // MOVQ    AX, (SP)
	self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))    // MOVQ    CX, 8(SP)
	self.Emit("MOVQ", _ARG_fv, _AX)            // MOVQ    fv, AX
	self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))   // MOVQ    AX, 16(SP)
	self.call_go(_F_iteratorStart)             // CALL_GO iteratorStart
	self.Emit("MOVQ", jit.Ptr(_SP, 24), _SP_q) // MOVQ    24(SP), SP.q
	self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)   // MOVQ    32(SP), ET
	self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)   // MOVQ    40(SP), EP
	self.Emit("TESTQ", _ET, _ET)               // TESTQ   ET, ET
	self.Sjmp("JNZ", _LB_error)                // JNZ     _error
}
  1099  
// _asm_OP_map_stop releases the map iterator and clears SP.q so the stale
// pointer is not kept live.
func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
	self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ    SP.q, 0(SP)
	self.call_go(_F_iteratorStop)             // CALL_GO iteratorStop
	self.Emit("XORL", _SP_q, _SP_q)           // XORL    SP.q, SP.q
}
  1105  
// _asm_OP_map_check_key loads the iterator's current key pointer into SP.p
// and branches to p.vi() when it is nil (iteration exhausted).
func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_q, 0), _SP_p) // MOVQ    (SP.q), SP.p
	self.Emit("TESTQ", _SP_p, _SP_p)            // TESTQ   SP.p, SP.p
	self.Xjmp("JZ", p.vi())                     // JZ      p.vi()
}
  1111  
// _asm_OP_map_write_key: when the SortMapKeys flag is set, encode the key
// string here and jump to p.vi(); otherwise fall through to the unordered
// path that follows this opcode.
func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
	self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
	self.Sjmp("JNC", "_unordered_key_{n}")             // JNC _unordered_key_{n}
	self.encode_string(false)                          // STR $false
	self.Xjmp("JMP", p.vi())                           // JMP ${p.vi()}
	self.Link("_unordered_key_{n}")                    // _unordered_key_{n}:
}
  1119  
// _asm_OP_map_value_next points SP.p at the iterator's current value
// (8(SP.q)), then advances the iterator via iteratorNext.
func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ    8(SP.q), SP.p
	self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))   // MOVQ    SP.q, (SP)
	self.call_go(_F_iteratorNext)               // CALL_GO iteratorNext
}
  1125  
// _asm_OP_slice_len loads the slice length into SP.x, points SP.p at the
// first element, and sets the _S_init flag so the first slice_next does not
// advance the element pointer.
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
	self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x)  // MOVQ  8(SP.p), SP.x
	self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)  // MOVQ  (SP.p), SP.p
	self.Emit("ORQ", jit.Imm(1<<_S_init), _SP_f) // ORQ   $(1<<_S_init), SP.f
}
  1131  
// _asm_OP_slice_next: exit to p.vi() when no elements remain; otherwise
// decrement the count and advance SP.p by the element size — except on the
// first iteration: BTRQ clears _S_init and sets CF to its previous value, and
// CMOVQCC only commits the advanced pointer when CF is clear (flag was unset).
func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
	self.Emit("TESTQ", _SP_x, _SP_x)                        // TESTQ   SP.x, SP.x
	self.Xjmp("JZ", p.vi())                                 // JZ      p.vi()
	self.Emit("SUBQ", jit.Imm(1), _SP_x)                    // SUBQ    $1, SP.x
	self.Emit("BTRQ", jit.Imm(_S_init), _SP_f)              // BTRQ    $_S_init, SP.f
	self.Emit("LEAQ", jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ    $(p.vlen())(SP.p), AX
	self.Emit("CMOVQCC", _AX, _SP_p)                        // CMOVQNC AX, SP.p
}
  1140  
// _asm_OP_marshal encodes a value through its json.Marshaler implementation.
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
	self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}
  1144  
  1145  func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
  1146  	if p.vk() != reflect.Ptr {
  1147  		panic("marshal_p: invalid type")
  1148  	} else {
  1149  		self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
  1150  	}
  1151  }
  1152  
// _asm_OP_marshal_text encodes a value through its encoding.TextMarshaler
// implementation.
func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
	self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}
  1156  
  1157  func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
  1158  	if p.vk() != reflect.Ptr {
  1159  		panic("marshal_text_p: invalid type")
  1160  	} else {
  1161  		self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
  1162  	}
  1163  }
  1164  
// _asm_OP_cond_set sets the _S_cond flag in SP.f.
func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
	self.Emit("ORQ", jit.Imm(1<<_S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
}
  1168  
// _asm_OP_cond_testc atomically tests-and-clears the _S_cond flag (BTRQ puts
// the old bit in CF) and branches to p.vi() when it was set.
func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
	self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
	self.Xjmp("JC", p.vi())
}
  1173  
// print_gc is a debug aid: it emits a println(i, p1.op(), p2.op()) call so
// the generated code reports which instruction pair it is executing.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
	self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16)) // MOVQ $(p2.op()), 16(SP)
	self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))  // MOVQ $(p1.op()), 8(SP)
	self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))        // MOVQ $(i), (SP)
	self.call_go(_F_println)
}