github.com/bytedance/sonic@v1.11.7-0.20240517092252-d2edb31b167b/internal/encoder/assembler_stkabi_amd64.go (about)

     1  // +build go1.16,!go1.17
     2  
     3  /*
     4   * Copyright 2021 ByteDance Inc.
     5   *
     6   * Licensed under the Apache License, Version 2.0 (the "License");
     7   * you may not use this file except in compliance with the License.
     8   * You may obtain a copy of the License at
     9   *
    10   *     http://www.apache.org/licenses/LICENSE-2.0
    11   *
    12   * Unless required by applicable law or agreed to in writing, software
    13   * distributed under the License is distributed on an "AS IS" BASIS,
    14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    15   * See the License for the specific language governing permissions and
    16   * limitations under the License.
    17   */
    18  
    19  package encoder
    20  
    21  import (
    22      `fmt`
    23      `reflect`
    24      `strconv`
    25      `unsafe`
    26  
    27      `github.com/bytedance/sonic/internal/cpu`
    28      `github.com/bytedance/sonic/internal/jit`
    29      `github.com/bytedance/sonic/internal/native/types`
    30      `github.com/twitchyliquid64/golang-asm/obj`
    31      `github.com/twitchyliquid64/golang-asm/obj/x86`
    32  
    33      `github.com/bytedance/sonic/internal/native`
    34      `github.com/bytedance/sonic/internal/rt`
    35  )
    36  
    37  /** Register Allocations
    38   *
    39   *  State Registers:
    40   *
    41   *      %rbx : stack base
    42   *      %rdi : result pointer
    43   *      %rsi : result length
    44   *      %rdx : result capacity
    45   *      %r12 : sp->p
    46   *      %r13 : sp->q
    47   *      %r14 : sp->x
    48   *      %r15 : sp->f
    49   *
    50   *  Error Registers:
    51   *
    52   *      %r10 : error type register
    53   *      %r11 : error pointer register
    54   */
    55  
    56  /** Function Prototype & Stack Map
    57   *
    58   *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
    59   *
    60   *  buf    :   (FP)
    61   *  p      :  8(FP)
    62   *  sb     : 16(FP)
    63   *  fv     : 24(FP)
    64   *  err.vt : 32(FP)
    65   *  err.vp : 40(FP)
    66   */
    67  
    68  const (
    69      _S_cond = iota
    70      _S_init
    71  )
    72  
    73  const (
    74      _FP_args   = 48     // 48 bytes for passing arguments to this function
    75      _FP_fargs  = 64     // 64 bytes for passing arguments to other Go functions
    76      _FP_saves  = 64     // 64 bytes for saving the registers before CALL instructions
    77      _FP_locals = 24     // 24 bytes for local variables
    78  )
    79  
    80  const (
    81      _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    82      _FP_size = _FP_offs + 8     // 8 bytes for the parent frame pointer
    83      _FP_base = _FP_size + 8     // 8 bytes for the return address
    84  )
    85  
    86  const (
    87      _FM_exp32 = 0x7f800000
    88      _FM_exp64 = 0x7ff0000000000000
    89  )
    90  
    91  const (
    92      _IM_null   = 0x6c6c756e           // 'null'
    93      _IM_true   = 0x65757274           // 'true'
    94      _IM_fals   = 0x736c6166           // 'fals' ('false' without the 'e')
    95      _IM_open   = 0x00225c22           // '"\"∅'
    96      _IM_array  = 0x5d5b               // '[]'
    97      _IM_object = 0x7d7b               // '{}'
    98      _IM_mulv   = -0x5555555555555555
    99  )
   100  
   101  const (
   102      _LB_more_space        = "_more_space"
   103      _LB_more_space_return = "_more_space_return_"
   104  )
   105  
   106  const (
   107      _LB_error                 = "_error"
   108      _LB_error_too_deep        = "_error_too_deep"
   109      _LB_error_invalid_number  = "_error_invalid_number"
   110      _LB_error_nan_or_infinite = "_error_nan_or_infinite"
   111      _LB_panic = "_panic"
   112  )
   113  
/* general-purpose scratch registers */
var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
)

/* SSE / AVX scratch registers */
var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

/* stack base and result buffer registers (pointer / length / capacity),
 * see the register allocation notes at the top of this file */
var (
    _ST = jit.Reg("BX")
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

/* link register and error registers */
var (
    _LR  = jit.Reg("R9")
    _R10 = jit.Reg("R10")   // used for gcWriterBarrier
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)

/* encoder state registers: sp->p, sp->q, sp->x, sp->f */
var (
    _SP_p = jit.Reg("R12")
    _SP_q = jit.Reg("R13")
    _SP_x = jit.Reg("R14")
    _SP_f = jit.Reg("R15")
)

/* incoming arguments, addressed off SP (see the stack map above) */
var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
)

/* return values: err.vt and err.vp */
var (
    _RET_et = jit.Ptr(_SP, _FP_base + 32)
    _RET_ep = jit.Ptr(_SP, _FP_base + 40)
)

/* frame-local variables */
var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
)

/* register sets saved/restored around the different kinds of calls */
var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
    _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
)
   175  
// _Assembler compiles a _Program into executable machine code through
// the JIT base assembler.
type _Assembler struct {
    jit.BaseAssembler
    p _Program      // the program (opcode sequence) to compile
    x int           // counter used to generate unique "_more_space_return_{n}" labels
    name string     // name suffix of the generated function ("encode_" + name)
}
   182  
   183  func newAssembler(p _Program) *_Assembler {
   184      return new(_Assembler).Init(p)
   185  }
   186  
/** Assembler Interface **/

// Load assembles the program and returns the resulting encoder
// function, registered under the name "encode_" + self.name.
func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
   191  
// Init binds the program to the assembler and registers the compile
// callback with the JIT base assembler.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}
   197  
// compile emits the whole function: prologue, one code section per
// instruction, the epilogue, then the shared out-of-line routines.
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}
   204  
   205  /** Assembler Stages **/
   206  
// _OpFuncTab maps each opcode to the assembler method that emits its
// machine code; unassigned slots are nil (rejected by instr).
var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}
   259  
   260  func (self *_Assembler) instr(v *_Instr) {
   261      if fn := _OpFuncTab[v.op()]; fn != nil {
   262          fn(self, v)
   263      } else {
   264          panic(fmt.Sprintf("invalid opcode: %d", v.op()))
   265      }
   266  }
   267  
   268  func (self *_Assembler) instrs() {
   269      for i, v := range self.p {
   270          self.Mark(i)
   271          self.instr(&v)
   272          self.debug_instr(i, &v)
   273      }
   274  }
   275  
// builtins appends the shared out-of-line routines referenced by the
// generated code: buffer growth, the error stubs and the panic shim.
func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}
   283  
// epilogue emits the function exit path. Falling through from the last
// instruction clears ET/EP (no error); the _error label is entered with
// them already set. Both paths flush the buffer length and store the
// (err.vt, err.vp) return values before unwinding the frame.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)                     // XORL ET, ET
    self.Emit("XORL", _EP, _EP)                     // XORL EP, EP
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _AX)                 // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))         // MOVQ RL, 8(AX)
    self.Emit("MOVQ", _ET, _RET_et)                 // MOVQ ET, et<>+32(FP)
    self.Emit("MOVQ", _EP, _RET_ep)                 // MOVQ EP, ep<>+40(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
    self.Emit("RET")                                // RET
}
   297  
// prologue allocates the stack frame, saves the parent frame pointer,
// loads the output buffer into RP/RL/RC, and initializes the encoder
// state registers (SP.p from the argument, SP.x/SP.f/SP.q zeroed).
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
    self.load_buffer()                              // LOAD {buf}
    self.Emit("MOVQ", _ARG_vp, _SP_p)               // MOVQ vp<>+8(FP), SP.p
    self.Emit("MOVQ", _ARG_sb, _ST)                 // MOVQ sb<>+16(FP), ST
    self.Emit("XORL", _SP_x, _SP_x)                 // XORL SP.x, SP.x
    self.Emit("XORL", _SP_f, _SP_f)                 // XORL SP.f, SP.f
    self.Emit("XORL", _SP_q, _SP_q)                 // XORL SP.q, SP.q
}
   309  
   310  /** Assembler Inline Functions **/
   311  
   312  func (self *_Assembler) xsave(reg ...obj.Addr) {
   313      for i, v := range reg {
   314          if i > _FP_saves / 8 - 1 {
   315              panic("too many registers to save")
   316          } else {
   317              self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
   318          }
   319      }
   320  }
   321  
   322  func (self *_Assembler) xload(reg ...obj.Addr) {
   323      for i, v := range reg {
   324          if i > _FP_saves / 8 - 1 {
   325              panic("too many registers to load")
   326          } else {
   327              self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
   328          }
   329      }
   330  }
   331  
   332  func (self *_Assembler) rbuf_di() {
   333      if _RP.Reg != x86.REG_DI {
   334          panic("register allocation messed up: RP != DI")
   335      } else {
   336          self.Emit("ADDQ", _RL, _RP)
   337      }
   338  }
   339  
// store_int encodes the integer at (SP.p) into the buffer: it reserves
// nd bytes, points DI at the write position, loads the value into SI
// with the MOV variant ins, calls the native formatter fn, and advances
// RL by the byte count the formatter returns in AX.
func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                           // SAVE   $C_regs
    self.rbuf_di()                          // MOVQ   RP, DI
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)  // $ins   (SP.p), SI
    self.call_c(fn)                         // CALL_C $fn
    self.Emit("ADDQ", _AX, _RL)             // ADDQ   AX, RL
}
   348  
// store_str emits code that copies the literal s into the buffer at
// RP+RL, using the widest stores possible (8/4/2/1 bytes). It does NOT
// advance RL; callers adjust it themselves (see add_text). Callers must
// also have reserved enough space beforehand.
func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte stores */
    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)        // MOVQ $s[i:], AX
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))  // MOVQ AX, i(RP)(RL)
        i += 8
    }

    /* 4-byte stores */
    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVL $s[i:], i(RP)(RL)
        i += 4
    }

    /* 2-byte stores */
    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))  // MOVW $s[i:], i(RP)(RL)
        i += 2
    }

    /* last byte */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))     // MOVB $s[i:], i(RP)(RL)
    }
}
   377  
// check_size ensures the buffer has at least n spare bytes past RL,
// growing it through _more_space if necessary.
func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}
   381  
// check_size_r ensures the buffer can hold RL + r + d bytes, where r is
// a register holding a dynamic length and d a constant extra.
func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}
   385  
// check_size_rl emits the capacity check for the effective address v
// (the required end offset): if it exceeds RC, control transfers to the
// shared _more_space routine and resumes at a uniquely numbered
// "_more_space_return_{n}" label.
func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the following code relies on LR == R9 to work */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)       // LEAQ $v, AX
    self.Emit("CMPQ", _AX, _RC)     // CMPQ AX, RC
    self.Sjmp("JBE" , key)          // JBE  _more_space_return_{n}
    self.slice_grow_ax(key)         // GROW $key
    self.Link(key)                  // _more_space_return_{n}:
}
   403  
// slice_grow_ax jumps to _more_space with the required capacity already
// in AX, first loading the address of the return label ret into R9 (LR)
// so _more_space can jump back.
func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ ?(PC), R9
    self.Sref(ret, 4)                   // .... &ret
    self.Sjmp("JMP" , _LB_more_space)   // JMP  _more_space
}
   409  
   410  /** State Stack Helpers **/
   411  
const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))    // byte size of one saved encoder state
    _StackLimit = _MaxStack * _StateSize            // byte limit of the state stack
)
   416  
// save_state pushes the current encoder state (SP.x, SP.f, SP.p, SP.q)
// onto the state stack at (ST), bailing out to _error_too_deep when the
// new top-of-stack offset would reach _StackLimit. The pointer slots
// are stored via WritePtr (write-barrier aware).
func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8)    // LEAQ _StateSize(CX), R8
    self.Emit("CMPQ", _R8, jit.Imm(_StackLimit))        // CMPQ R8, $_StackLimit
    self.Sjmp("JAE" , _LB_error_too_deep)               // JAE  _error_too_deep
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))   // MOVQ SP.x, 8(ST)(CX)
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))  // MOVQ SP.f, 16(ST)(CX)
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))   // MOVQ SP.p, 24(ST)(CX)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))   // MOVQ SP.q, 32(ST)(CX)
    self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0))             // MOVQ R8, (ST)
}
   428  
// drop_state pops decr bytes (one or two saved states) off the state
// stack, restores SP.x/SP.f/SP.p/SP.q from the new top, and zeroes the
// freed slots so no stale pointers remain visible to the GC.
func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)                // MOVQ  (ST), AX
    self.Emit("SUBQ" , jit.Imm(decr), _AX)                  // SUBQ  $decr, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))                // MOVQ  AX, (ST)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)      // MOVQ  8(ST)(AX), SP.x
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)     // MOVQ  16(ST)(AX), SP.f
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)     // MOVQ  24(ST)(AX), SP.p
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)     // MOVQ  32(ST)(AX), SP.q
    self.Emit("PXOR" , _X0, _X0)                            // PXOR  X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))        // MOVOU X0, 8(ST)(AX)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))       // MOVOU X0, 24(ST)(AX)
}
   441  
   442  /** Buffer Helpers **/
   443  
// add_char appends one byte at the write position and advances RL.
func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVB $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}
   448  
// add_long stores a 4-byte little-endian immediate at the write
// position but advances RL by n only (n may be < 4 when the trailing
// bytes of ch are padding, e.g. `"\"` with n == 3).
func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))  // MOVL $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(n), _RL)                              // ADDQ $n, RL
}
   453  
// add_text appends the literal ss to the buffer and advances RL by its
// length. Space must have been reserved by the caller.
func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)                                  // TEXT $ss
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)     // ADDQ ${len(ss)}, RL
}
   458  
// prep_buffer flushes RL into the buffer header and passes the *[]byte
// as the first stack argument at (SP) for a Go-ABI call.
func (self *_Assembler) prep_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))     // MOVQ RL, 8(AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ AX, (SP)
}
   464  
// prep_buffer_c flushes RL into the buffer header and loads the
// *[]byte into DI as the first C-ABI argument.
func (self *_Assembler) prep_buffer_c() {
    self.Emit("MOVQ", _ARG_rb, _DI)             // MOVQ rb<>+0(FP), DI
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))     // MOVQ RL, 8(DI)
}
   469  
// save_buffer writes RP/RL/RC back into the []byte header at *rb.
func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)             // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RP, jit.Ptr(_CX,  0))    // MOVQ RP, (CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX,  8))    // MOVQ RL, 8(CX)
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))    // MOVQ RC, 16(CX)
}
   476  
// load_buffer loads the []byte header at *rb into RP/RL/RC.
func (self *_Assembler) load_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)             // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", jit.Ptr(_AX,  0), _RP)    // MOVQ (AX), RP
    self.Emit("MOVQ", jit.Ptr(_AX,  8), _RL)    // MOVQ 8(AX), RL
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)    // MOVQ 16(AX), RC
}
   483  
   484  /** Function Interface Helpers **/
   485  
// call loads the target address into AX and performs an indirect CALL.
func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _AX)  // MOVQ $pc, AX
    self.Rjmp("CALL", _AX)      // CALL AX
}
   490  
// save_c spills the buffer registers (RP/RL/RC) clobbered by native
// (C ABI) calls; call_c restores them.
func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)     // SAVE $REG_ffi
}
   494  
// call_c invokes a native (C ABI) function and restores the registers
// previously spilled by save_c. Callers must invoke save_c first.
func (self *_Assembler) call_c(pc obj.Addr) {
    self.call(pc)               // CALL $pc
    self.xload(_REG_ffi...)     // LOAD $REG_ffi
}
   499  
// call_go saves and restores all state and buffer registers around a
// call into a Go function.
func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)     // SAVE $REG_all
    self.call(pc)               // CALL $pc
    self.xload(_REG_all...)     // LOAD $REG_all
}
   505  
// call_encoder calls another generated encoder, preserving the encoder
// state registers, then reloads RP/RL/RC from the buffer header since
// the callee may have grown or moved the buffer.
func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)     // SAVE $REG_enc
    self.call(pc)               // CALL $pc
    self.xload(_REG_enc...)     // LOAD $REG_enc
    self.load_buffer()          // LOAD {buf}
}
   512  
   513  func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
   514      switch vt.Kind() {
   515          case reflect.Interface       : self.call_marshaler_i(fn, it)
   516          case reflect.Ptr, reflect.Map: self.call_marshaler_v(fn, it, vt, true)
   517          // struct/array of 1 direct iface type can be direct
   518          default                      : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
   519      }
   520  }
   521  
// call_marshaler_i encodes an interface value through its marshaler:
// it asserts the interface at (SP.p) to type it via assertI2I, calls
// the encoder fn with the buffer, asserted value and flags, checks the
// returned error, and writes "null" when the interface itself is nil.
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Gtype(it), _AX)                          // MOVQ    $it, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                        // MOVQ    AX, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)                      // MOVQ    (SP.p), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)                      // MOVQ    8(SP.p), CX
    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ   AX, AX
    self.Sjmp("JZ"   , "_null_{n}")                                 // JZ      _null_{n}
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))                        // MOVQ    AX, 8(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))                       // MOVQ    CX, 16(SP)
    self.call_go(_F_assertI2I)                                      // CALL_GO assertI2I
    self.prep_buffer()                                              // MOVE    {buf}, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0)                       // MOVOU   24(SP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                        // MOVOU   X0, 8(SP)
    self.Emit("MOVQ", _ARG_fv,  _CX)                                // MOVQ    ARG.fv, CX
    self.Emit("MOVQ", _CX,  jit.Ptr(_SP, 24))                       // MOVQ    CX, 24(SP)
    self.call_encoder(fn)                                           // CALL    $fn
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)                       // MOVQ    32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)                       // MOVQ    40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                                    // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)                                   // JNZ     _error
    self.Sjmp("JMP"  , "_done_{n}")                                 // JMP     _done_{n}
    self.Link("_null_{n}")                                          // _null_{n}:
    self.check_size(4)                                              // SIZE    $4
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))   // MOVL    $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ    $4, RL
    self.Link("_done_{n}")                                          // _done_{n}:
}
   549  
// call_marshaler_v encodes a non-interface value through its marshaler:
// it passes the buffer, the itab for (it, vt), and the value pointer
// (loaded from (SP.p) when deref is true, SP.p itself otherwise) to the
// encoder fn, then checks the returned error.
func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Itab(it, vt), _AX)    // MOVQ $(itab(it, vt)), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)

    /* dereference the pointer if needed */
    if !deref {
        self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16))  // MOVQ SP.p, 16(SP)
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)   // MOVQ (SP.p), AX
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ AX, 16(SP)
    }

    /* call the encoder, and perform error checks */
    self.Emit("MOVQ", _ARG_fv,  _CX)            // MOVQ   ARG.fv, CX
    self.Emit("MOVQ", _CX,  jit.Ptr(_SP, 24))   // MOVQ   CX, 24(SP)
    self.call_encoder(fn)                       // CALL  $fn
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ  32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ  40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
}
   572  
   573  /** Builtin: _more_space **/
   574  
var (
    _T_byte      = jit.Type(byteType)       // type descriptor for byte, passed to growslice
    _F_growslice = jit.Func(rt.GrowSlice)   // runtime slice-growing routine
)
   579  
// more_space is the shared out-of-line buffer-growing routine. It is
// entered with the required capacity in AX and the return address in
// LR (R9): it calls rt.GrowSlice(byteType, {RP, RL, RC}, cap), loads
// the new slice header back into RP/RL/RC, stores it through *rb, and
// jumps back through LR.
func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _T_byte, _AX)                 // MOVQ $_T_byte, _AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))         // MOVQ _AX, (SP)
    self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8))         // MOVQ RP, 8(SP)
    self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16))        // MOVQ RL, 16(SP)
    self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24))        // MOVQ RC, 24(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))        // MOVQ AX, 32(SP)
    self.xsave(_REG_jsr...)                         // SAVE $REG_jsr
    self.call(_F_growslice)                         // CALL $pc
    self.xload(_REG_jsr...)                         // LOAD $REG_jsr
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP)        // MOVQ 40(SP), RP
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL)        // MOVQ 48(SP), RL
    self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC)        // MOVQ 56(SP), RC
    self.save_buffer()                              // SAVE {buf}
    self.Rjmp("JMP" , _LR)                          // JMP  LR
}
   597  
   598  /** Builtin Errors **/
   599  
/* pre-computed error values and the itab of json.UnsupportedValueError */
var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)
   605  
// error_too_deep loads the "exceeded max depth" error into ET/EP and
// jumps to the common error exit.
func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)                 // MOVQ $_V_ERR_too_deep, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
}
   612  
// error_invalid_number calls error_number to build the error value
// (returned at 16(SP)/24(SP)), loads it into ET/EP and jumps to the
// common error exit.
func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.call_go(_F_error_number)               // CALL_GO error_number
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET)    // MOVQ    16(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP)    // MOVQ    24(SP), EP
    self.Sjmp("JMP" , _LB_error)                // JMP     _error
}
   620  
// error_nan_or_infinite loads the "NaN or Infinite" error into ET/EP
// and jumps to the common error exit.
func (self *_Assembler) error_nan_or_infinite()  {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)          // MOVQ $_V_ERR_nan_or_infinite, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
}
   627  
   628  /** String Encoding Routine **/
   629  
var (
    _F_quote = jit.Imm(int64(native.S_quote))   // native string-quoting routine
    _F_panic = jit.Func(goPanic)                // Go-side panic shim
)
   634  
// go_panic is the shared panic routine: the caller stores the panic
// code at (SP) before jumping here; SP.p is passed at 8(SP) so goPanic
// can report the offending value pointer.
func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
    self.call_go(_F_panic)
}
   640  
// encode_string encodes the Go string header at (SP.p) as a JSON
// string via the native quote routine, retrying with a grown buffer
// whenever quote reports insufficient space (negative AX). When
// doubleQuote is set the output is additionally escaped so it can be
// embedded inside another JSON string. A non-empty string with a nil
// data pointer jumps to the panic routine.
func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)  // MOVQ  8(SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
    self.Sjmp("JZ"   , "_str_empty_{n}")        // JZ    _str_empty_{n}
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE"   , "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_str_next_{n}")

    /* opening quote, check for double quote */
    if !doubleQuote {
        self.check_size_r(_AX, 2)   // SIZE $2
        self.add_char('"')          // CHAR $'"'
    } else {
        self.check_size_r(_AX, 6)   // SIZE $6
        self.add_long(_IM_open, 3)  // TEXT $`"\"`
    }

    /* quoting loop */
    self.Emit("XORL", _AX, _AX)         // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_sp)     // MOVQ AX, sp
    self.Link("_str_loop_{n}")          // _str_loop_{n}:
    self.save_c()                       // SAVE $REG_ffi

    /* load the output buffer first, and then input buffer,
     * because the parameter registers collide with RP / RL / RC */
    self.Emit("MOVQ", _RC, _CX)                         // MOVQ RC, CX
    self.Emit("SUBQ", _RL, _CX)                         // SUBQ RL, CX
    self.Emit("MOVQ", _CX, _VAR_dn)                     // MOVQ CX, dn
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX)     // LEAQ (RP)(RL), DX
    self.Emit("LEAQ", _VAR_dn, _CX)                     // LEAQ dn, CX
    self.Emit("MOVQ", _VAR_sp, _AX)                     // MOVQ sp, AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)           // MOVQ (SP.p), DI
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)           // MOVQ 8(SP.p), SI
    self.Emit("ADDQ", _AX, _DI)                         // ADDQ AX, DI
    self.Emit("SUBQ", _AX, _SI)                         // SUBQ AX, SI

    /* set the flags based on `doubleQuote` */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)                                 // XORL R8, R8
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)     // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    }

    /* call the native quoter */
    self.call_c(_F_quote)                   // CALL  quote
    self.Emit("ADDQ" , _VAR_dn, _RL)        // ADDQ  dn, RL
    self.Emit("TESTQ", _AX, _AX)            // TESTQ AX, AX
    self.Sjmp("JS"   , "_str_space_{n}")    // JS    _str_space_{n}

    /* close the string, check for double quote */
    if !doubleQuote {
        self.check_size(1)                  // SIZE $1
        self.add_char('"')                  // CHAR $'"'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP  _str_end_{n}
    } else {
        self.check_size(3)                  // SIZE $3
        self.add_text("\\\"\"")             // TEXT $'\""'
        self.Sjmp("JMP", "_str_end_{n}")    // JMP  _str_end_{n}
    }

    /* not enough space to contain the quoted string */
    self.Link("_str_space_{n}")                         // _str_space_{n}:
    self.Emit("NOTQ", _AX)                              // NOTQ AX
    self.Emit("ADDQ", _AX, _VAR_sp)                     // ADDQ AX, sp
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX)     // LEAQ (RC)(RC), AX
    self.slice_grow_ax("_str_loop_{n}")                 // GROW _str_loop_{n}

    /* empty string, check for double quote */
    if !doubleQuote {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(2)              // SIZE $2
        self.add_text("\"\"")           // TEXT $'""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    } else {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(6)              // SIZE $6
        self.add_text("\"\\\"\\\"\"")   // TEXT $'"\"\""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    }
}
   723  
   724  /** OpCode Assembler Functions **/
   725  
// Unpacked runtime descriptors of the two marshaler interfaces, passed to
// call_marshaler / call_marshaler_v for interface-satisfaction dispatch.
var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)
   730  
// Addresses of native (C) subroutines, wrapped as immediates so they can be
// invoked through call_c.
var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)
   738  
// Go functions invoked through the Go calling convention (via call_go).
var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)
   744  
// Map-iteration helpers used by the map_* opcodes.
var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)
   750  
// Encoder entry points. These are assigned in init() rather than initialized
// inline — NOTE(review): presumably to defer jit.Func resolution until package
// initialization order is settled; confirm before changing.
var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)
   756  
const (
    // Mode flag handed to the native b64encode when AVX2 is available
    // (see _asm_OP_bin).
    _MODE_AVX2 = 1 << 2
)
   760  
// init resolves the encoder entry-point addresses declared above.
func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}
   766  
// _asm_OP_null appends the 4-byte literal "null" to the output buffer.
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))  // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                             // ADDQ $4, RL
}
   772  
// _asm_OP_empty_arr emits "[]" when the NoNullSliceOrMap flag is set in fv,
// otherwise it falls back to emitting "null".
func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)  // BTQ $bitNoNullSliceOrMap, fv
    self.Sjmp("JC", "_empty_arr_{n}")                               // JC  _empty_arr_{n}
    self._asm_OP_null(nil)                                          // flag clear: emit "null"
    self.Sjmp("JMP", "_empty_arr_end_{n}")                          // JMP _empty_arr_end_{n}
    self.Link("_empty_arr_{n}")                                     // _empty_arr_{n}:
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))  // MOVW $'[]', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(2), _RL)                              // ADDQ $2, RL
    self.Link("_empty_arr_end_{n}")                                 // _empty_arr_end_{n}:
}
   784  
// _asm_OP_empty_obj emits "{}" when the NoNullSliceOrMap flag is set in fv,
// otherwise it falls back to emitting "null".
func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)   // BTQ $bitNoNullSliceOrMap, fv
    self.Sjmp("JC", "_empty_obj_{n}")                                // JC  _empty_obj_{n}
    self._asm_OP_null(nil)                                           // flag clear: emit "null"
    self.Sjmp("JMP", "_empty_obj_end_{n}")                           // JMP _empty_obj_end_{n}
    self.Link("_empty_obj_{n}")                                      // _empty_obj_{n}:
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))  // MOVW $'{}', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(2), _RL)                               // ADDQ $2, RL
    self.Link("_empty_obj_end_{n}")                                  // _empty_obj_end_{n}:
}
   796  
// _asm_OP_bool appends "true" or "false" depending on the byte at (SP.p).
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))                // CMPB (SP.p), $0
    self.Sjmp("JE"  , "_false_{n}")                                 // JE   _false_{n}
    self.check_size(4)                                              // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'true', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                              // ADDQ $4, RL
    self.Sjmp("JMP" , "_end_{n}")                                   // JMP  _end_{n}
    self.Link("_false_{n}")                                         // _false_{n}:
    self.check_size(5)                                              // SIZE $5
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))   // MOVL $'fals', (RP)(RL*1)
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))        // MOVB $'e', 4(RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(5), _RL)                              // ADDQ $5, RL
    self.Link("_end_{n}")                                           // _end_{n}:
}
   811  
// _asm_OP_i8 encodes a sign-extended int8 via i64toa, reserving 4 output
// bytes (longest value is "-128").
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}
   815  
// _asm_OP_i16 encodes a sign-extended int16 via i64toa, reserving 6 output
// bytes (longest value is "-32768").
func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}
   819  
// _asm_OP_i32 encodes a sign-extended int32 via i64toa, reserving 17 output
// bytes. NOTE(review): int32 text needs at most 11 bytes; the extra slack is
// presumably required by the native writer — confirm before shrinking.
func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}
   823  
// _asm_OP_i64 encodes an int64 via i64toa, reserving 21 output bytes
// (longest value "-9223372036854775808" is 20 bytes).
func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}
   827  
// _asm_OP_u8 encodes a zero-extended uint8 via u64toa, reserving 3 output
// bytes (longest value is "255").
func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}
   831  
// _asm_OP_u16 encodes a zero-extended uint16 via u64toa, reserving 5 output
// bytes (longest value is "65535").
func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}
   835  
// _asm_OP_u32 encodes a zero-extended uint32 via u64toa, reserving 16 output
// bytes. NOTE(review): uint32 text needs at most 10 bytes; extra slack is
// presumably for the native writer — confirm before shrinking.
func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}
   839  
// _asm_OP_u64 encodes a uint64 via u64toa, reserving 20 output bytes
// (longest value "18446744073709551615" is exactly 20 bytes).
func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}
   843  
// _asm_OP_f32 encodes a float32 with the native f32toa; NaN and ±Inf are
// rejected by branching to the nan_or_infinite error path.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL"    , jit.Ptr(_SP_p, 0), _AX)       // MOVL     (SP.p), AX
    self.Emit("ANDL"    , jit.Imm(_FM_exp32), _AX)      // ANDL     $_FM_exp32, AX  (isolate exponent bits)
    self.Emit("XORL"    , jit.Imm(_FM_exp32), _AX)      // XORL     $_FM_exp32, AX  (zero iff all exponent bits set)
    self.Sjmp("JZ"      , _LB_error_nan_or_infinite)    // JZ       _error_nan_or_infinite
    self.save_c()                                       // SAVE     $C_regs
    self.rbuf_di()                                      // MOVQ     RP, DI
    self.Emit("MOVSS"   , jit.Ptr(_SP_p, 0), _X0)       // MOVSS    (SP.p), X0
    self.call_c(_F_f32toa)                              // CALL_C   f32toa
    self.Emit("ADDQ"    , _AX, _RL)                     // ADDQ     AX, RL
}
   856  
// _asm_OP_f64 encodes a float64 with the native f64toa; NaN and ±Inf are
// rejected by branching to the nan_or_infinite error path.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ"  , jit.Ptr(_SP_p, 0), _AX)     // MOVQ   (SP.p), AX
    self.Emit("MOVQ"  , jit.Imm(_FM_exp64), _CX)    // MOVQ   $_FM_exp64, CX
    self.Emit("ANDQ"  , _CX, _AX)                   // ANDQ   CX, AX  (isolate exponent bits)
    self.Emit("XORQ"  , _CX, _AX)                   // XORQ   CX, AX  (zero iff all exponent bits set)
    self.Sjmp("JZ"    , _LB_error_nan_or_infinite)  // JZ     _error_nan_or_infinite
    self.save_c()                                   // SAVE   $C_regs
    self.rbuf_di()                                  // MOVQ   RP, DI
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)     // MOVSD  (SP.p), X0
    self.call_c(_F_f64toa)                          // CALL_C f64toa
    self.Emit("ADDQ"  , _AX, _RL)                   // ADDQ   AX, RL
}
   870  
// _asm_OP_str encodes a Go string as a JSON string (no double-quote mode).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}
   874  
// _asm_OP_bin encodes a []byte value as a double-quoted base64 string using
// the native b64encode subroutine. The MULQ sequence computes an upper bound
// for the encoded length from the byte count at 8(SP.p).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)           // MOVQ 8(SP.p), AX
    self.Emit("ADDQ", jit.Imm(2), _AX)                  // ADDQ $2, AX
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)           // MOVQ $_IM_mulv, CX
    self.Emit("MOVQ", _DX, _R8)                         // MOVQ DX, R8 (save RC: MULQ clobbers DX)
    self.From("MULQ", _CX)                              // MULQ CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)     // LEAQ 1(DX)(DX), AX
    self.Emit("ORQ" , jit.Imm(2), _AX)                  // ORQ  $2, AX
    self.Emit("MOVQ", _R8, _DX)                         // MOVQ R8, DX (restore RC)
    self.check_size_r(_AX, 0)                           // SIZE AX
    self.add_char('"')                                  // CHAR $'"'
    self.save_c()                                       // SAVE $REG_ffi
    self.prep_buffer_c()                                // MOVE {buf}, DI
    self.Emit("MOVQ", _SP_p, _SI)                       // MOVQ SP.p, SI

    /* check for AVX2 support */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)                     // XORL DX, DX
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)     // MOVL $_MODE_AVX2, DX
    }

    /* call the encoder */
    self.call_c(_F_b64encode)   // CALL b64encode
    self.load_buffer()          // LOAD {buf}
    self.add_char('"')          // CHAR $'"'
}
   902  
// _asm_OP_quote encodes a Go string as a JSON string with double-quote
// escaping enabled (for string-quoted values).
func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}
   906  
// _asm_OP_number copies a json.Number value verbatim into the output buffer.
// An empty number encodes as "0"; a non-empty number with a nil data pointer
// panics; invalid numeric text branches to the invalid_number error path.
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)          // MOVQ    8(SP.p), CX
    self.Emit("TESTQ", _CX, _CX)                        // TESTQ   CX, CX
    self.Sjmp("JZ"   , "_empty_{n}")                    // JZ      _empty_{n}
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)          // MOVQ    (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ   AX, AX
    self.Sjmp("JNZ"   , "_number_next_{n}")             // JNZ     _number_next_{n}
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))  // nil data pointer but non-zero length
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))            // MOVQ    AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))            // MOVQ    CX, 8(SP)
    self.call_go(_F_isValidNumber)                      // CALL_GO isValidNumber
    self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0))    // CMPB    16(SP), $0
    self.Sjmp("JE"   , _LB_error_invalid_number)        // JE      _error_invalid_number
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)          // MOVQ    8(SP.p), AX
    self.check_size_r(_AX, 0)                           // SIZE    AX
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)    // LEAQ    (RP)(RL), AX
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)          // ADDQ    8(SP.p), RL
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))            // MOVQ    AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0)          // MOVOU   (SP.p), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))            // MOVOU   X0, 8(SP)
    self.call_go(_F_memmove)                            // CALL_GO memmove
    self.Sjmp("JMP"  , "_done_{n}")                     // JMP     _done_{n}
    self.Link("_empty_{n}")                             // _empty_{n}:
    self.check_size(1)                                  // SIZE    $1
    self.add_char('0')                                  // CHAR    $'0'
    self.Link("_done_{n}")                              // _done_{n}:
}
   936  
// _asm_OP_eface encodes an empty-interface (eface) value by calling
// encodeTypedPointer with the type word at (SP.p) and the data word at
// 8(SP.p), propagating any error through ET/EP.
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer()                          // MOVE  {buf}, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ  (SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ  AX, 8(SP)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // LEAQ  8(SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ  AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ  ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ  fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ  AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ  40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ  48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
}
   952  
// _asm_OP_iface encodes a non-empty interface (iface) value. It differs from
// _asm_OP_eface only in loading the concrete type from the itab (8 bytes into
// the word at (SP.p)) before calling encodeTypedPointer.
func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer()                          // MOVE  {buf}, (SP)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // MOVQ  (SP.p), AX
    self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX)    // MOVQ  8(AX), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))    // MOVQ  AX, 8(SP)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // LEAQ  8(SP.p), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ  AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ  ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ  fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ  AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ  40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ  48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
}
   969  
// _asm_OP_byte appends the single literal byte encoded in the instruction.
func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))    // MOVB $p.i64(), (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(1), _RL)                              // ADDQ $1, RL
}
   975  
// _asm_OP_text appends the literal text carried by the instruction.
func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))    // SIZE ${len(p.vs())}
    self.add_text(p.vs())           // TEXT ${p.vs()}
}
   980  
// _asm_OP_deref dereferences the current value pointer (follows one pointer).
func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)     // MOVQ (SP.p), SP.p
}
   984  
// _asm_OP_index advances the value pointer by the constant byte offset
// carried in the instruction (e.g. to reach a struct field).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ $p.i64(), AX
    self.Emit("ADDQ", _AX, _SP_p)               // ADDQ AX, SP.p
}
   989  
// _asm_OP_load restores SP.x / SP.p / SP.q from the top frame of the state
// stack ((ST) holds the current stack offset).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)                 // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)     // MOVQ -24(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)      // MOVQ -8(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)       // MOVQ (ST)(AX), SP.q
}
   996  
// _asm_OP_save pushes the current encoder state onto the state stack.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}
  1000  
// _asm_OP_drop pops one frame from the state stack.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}
  1004  
// _asm_OP_drop_2 pops two frames from the state stack, then clears 16 bytes
// at 56(ST)(AX) (X0 is zeroed by drop_state — NOTE(review): confirm against
// drop_state's implementation).
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)                     // DROP  $(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))   // MOVOU X0, 56(ST)(AX)
}
  1009  
// _asm_OP_recurse encodes a recursive (self-referential or compiled-later)
// type by calling encodeTypedPointer with the instruction's type. Indirect
// types pass SP.p directly; direct (non-indirect) types spill SP.p to a
// local so a pointer to it can be passed. Errors propagate through ET/EP.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer()                          // MOVE {buf}, (SP)
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _AX)    // MOVQ $(type(p.vt())), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)

    /* check for indirection */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _AX)               // MOVQ SP.p, AX
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)  // MOVQ SP.p, VAR.vp
        self.Emit("LEAQ", _VAR_vp, _AX)    // LEAQ VAR.vp, AX
    }

    /* call the encoder */
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))   // MOVQ  AX, 16(SP)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))   // MOVQ  ST, 24(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)            // MOVQ  fv, AX
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX)  // BTCQ $bitPointerValue, AX
    }
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))   // MOVQ  AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)    // CALL  encodeTypedPointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)   // MOVQ  40(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)   // MOVQ  48(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ"  , _LB_error)               // JNZ   _error
}
  1038  
// _asm_OP_is_nil jumps to the instruction's target when the pointer at
// (SP.p) is nil.
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1043  
// _asm_OP_is_nil_p1 jumps to the instruction's target when the pointer one
// word past SP.p (at 8(SP.p)) is nil.
func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))    // CMPQ 8(SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1048  
// _asm_OP_is_zero_1 jumps to the target when the 1-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPB (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1053  
// _asm_OP_is_zero_2 jumps to the target when the 2-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPW (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1058  
// _asm_OP_is_zero_4 jumps to the target when the 4-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPL (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1063  
// _asm_OP_is_zero_8 jumps to the target when the 8-byte value at (SP.p) is zero.
func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))    // CMPQ (SP.p), $0
    self.Xjmp("JE"  , p.vi())                           // JE   p.vi()
}
  1068  
// _asm_OP_is_zero_map jumps to the target when the map at (SP.p) is nil, or
// when its header's first word (the element count) is zero.
func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)          // MOVQ  (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                        // TESTQ AX, AX
    self.Xjmp("JZ"   , p.vi())                          // JZ    p.vi()
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))     // CMPQ  (AX), $0
    self.Xjmp("JE"   , p.vi())                          // JE    p.vi()
}
  1076  
// _asm_OP_goto unconditionally jumps to the instruction's target.
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
  1080  
// _asm_OP_map_iter starts a map iteration via iteratorStart, storing the
// iterator in SP.q and propagating errors through ET/EP.
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)       // MOVQ    $p.vt(), AX
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)      // MOVQ    (SP.p), CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // MOVQ    AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))        // MOVQ    CX, 8(SP)
    self.Emit("MOVQ" , _ARG_fv, _AX)                // MOVQ    fv, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))       // MOVQ    AX, 16(SP)
    self.call_go(_F_iteratorStart)                  // CALL_GO iteratorStart
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q)     // MOVQ    24(SP), SP.q
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)       // MOVQ    32(SP), ET
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)       // MOVQ    40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                    // TESTQ   ET, ET
    self.Sjmp("JNZ"  , _LB_error)                   // JNZ     _error
}
  1095  
// _asm_OP_map_stop releases the map iterator and clears SP.q so the stale
// iterator pointer is not kept alive.
func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))   // MOVQ    SP.q, 0(SP)
    self.call_go(_F_iteratorStop)               // CALL_GO iteratorStop
    self.Emit("XORL", _SP_q, _SP_q)             // XORL    SP.q, SP.q
}
  1101  
// _asm_OP_map_check_key loads the current key pointer from the iterator into
// SP.p, jumping to the target when there are no more entries (key is nil).
func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)    // MOVQ    (SP.q), SP.p
    self.Emit("TESTQ", _SP_p, _SP_p)                // TESTQ   SP.p, SP.p
    self.Xjmp("JZ"   , p.vi())                      // JZ      p.vi()
}
  1107  
// _asm_OP_map_write_key writes the current map key as a JSON string when
// SortMapKeys is requested; otherwise it falls through to the unordered path
// at the instruction's target.
func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)      // BTQ ${SortMapKeys}, fv
    self.Sjmp("JNC", "_unordered_key_{n}")                  // JNC _unordered_key_{n}
    self.encode_string(false)                               // STR $false
    self.Xjmp("JMP", p.vi())                                // JMP ${p.vi()}
    self.Link("_unordered_key_{n}")                         // _unordered_key_{n}:
}
  1115  
// _asm_OP_map_value_next points SP.p at the current value, then advances the
// iterator to the next entry.
func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)     // MOVQ    8(SP.q), SP.p
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))       // MOVQ    SP.q, (SP)
    self.call_go(_F_iteratorNext)                   // CALL_GO iteratorNext
}
  1121  
// _asm_OP_slice_len loads the slice length into SP.x, the data pointer into
// SP.p, and sets the _S_init flag so the first slice_next does not advance.
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)        // MOVQ  8(SP.p), SP.x
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)        // MOVQ  (SP.p), SP.p
    self.Emit("ORQ"  , jit.Imm(1 << _S_init), _SP_f)    // ORQ   $(1<<_S_init), SP.f
}
  1127  
// _asm_OP_slice_next jumps to the target when no elements remain; otherwise
// it decrements the counter and advances SP.p by the element size — except
// on the first iteration: BTRQ moves the _S_init flag into CF, and CMOVQCC
// only performs the advance when that flag was clear.
func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)                          // TESTQ   SP.x, SP.x
    self.Xjmp("JZ"     , p.vi())                                // JZ      p.vi()
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)                     // SUBQ    $1, SP.x
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)               // BTRQ    $_S_init, SP.f
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // LEAQ    $(p.vlen())(SP.p), AX
    self.Emit("CMOVQCC", _AX, _SP_p)                            // CMOVQCC AX, SP.p
}
  1136  
// _asm_OP_marshal emits a call to the value's json.Marshaler implementation.
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}
  1140  
  1141  func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
  1142      if p.vk() != reflect.Ptr {
  1143          panic("marshal_p: invalid type")
  1144      } else {
  1145          self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
  1146      }
  1147  }
  1148  
// _asm_OP_marshal_text emits a call to the value's encoding.TextMarshaler
// implementation.
func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}
  1152  
  1153  func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
  1154      if p.vk() != reflect.Ptr {
  1155          panic("marshal_text_p: invalid type")
  1156      } else {
  1157          self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
  1158      }
  1159  }
  1160  
// _asm_OP_cond_set sets the _S_cond flag in SP.f (tested by cond_testc).
func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)  // ORQ $(1<<_S_cond), SP.f
}
  1164  
// _asm_OP_cond_testc atomically tests-and-clears the _S_cond flag (BTRQ puts
// the previous bit into CF) and jumps to the target when it was set.
func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)      // BTRQ $_S_cond, SP.f
    self.Xjmp("JC"  , p.vi())                       // JC   p.vi()
}
  1169  
// print_gc emits a debug call that prints the instruction index and the two
// opcodes around it (debugging aid; not part of normal code generation).
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())),  jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())),  jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)),  jit.Ptr(_SP, 0))       // MOVQ $(i), (SP)
    self.call_go(_F_println)
}