github.com/cloudwego/frugal@v0.1.15/internal/atm/ssa/ir_amd64.go

     1  /*
     2   * Copyright 2022 ByteDance Inc.
     3   *
     4   * Licensed under the Apache License, Version 2.0 (the "License");
     5   * you may not use this file except in compliance with the License.
     6   * You may obtain a copy of the License at
     7   *
     8   *     http://www.apache.org/licenses/LICENSE-2.0
     9   *
    10   * Unless required by applicable law or agreed to in writing, software
    11   * distributed under the License is distributed on an "AS IS" BASIS,
    12   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    13   * See the License for the specific language governing permissions and
    14   * limitations under the License.
    15   */
    16  
    17  package ssa
    18  
    19  import (
    20      `fmt`
    21      `unsafe`
    22  
    23      `github.com/cloudwego/frugal/internal/rt`
    24      `github.com/cloudwego/iasm/x86_64`
    25  )
    26  
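        // ArchRegs lists the sixteen AMD64 general-purpose registers in hardware encoding order.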
    27  var ArchRegs = [...]x86_64.Register64 {
    28      x86_64.RAX,
    29      x86_64.RCX,
    30      x86_64.RDX,
    31      x86_64.RBX,
    32      x86_64.RSP,
    33      x86_64.RBP,
    34      x86_64.RSI,
    35      x86_64.RDI,
    36      x86_64.R8,
    37      x86_64.R9,
    38      x86_64.R10,
    39      x86_64.R11,
    40      x86_64.R12,
    41      x86_64.R13,
    42      x86_64.R14,
    43      x86_64.R15,
    44  }
    45  
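        // ArchRegIds maps each physical register to its hardware encoding (its index in ArchRegs).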
    46  var ArchRegIds = map[x86_64.Register64]uint64 {
    47      x86_64.RAX : 0,
    48      x86_64.RCX : 1,
    49      x86_64.RDX : 2,
    50      x86_64.RBX : 3,
    51      x86_64.RSP : 4,
    52      x86_64.RBP : 5,
    53      x86_64.RSI : 6,
    54      x86_64.RDI : 7,
    55      x86_64.R8  : 8,
    56      x86_64.R9  : 9,
    57      x86_64.R10 : 10,
    58      x86_64.R11 : 11,
    59      x86_64.R12 : 12,
    60      x86_64.R13 : 13,
    61      x86_64.R14 : 14,
    62      x86_64.R15 : 15,
    63  }
    64  
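        // ArchRegNames maps each physical register to its lowercase assembly name.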
    65  var ArchRegNames = map[x86_64.Register64]string {
    66      x86_64.RAX : "rax",
    67      x86_64.RCX : "rcx",
    68      x86_64.RDX : "rdx",
    69      x86_64.RBX : "rbx",
    70      x86_64.RSP : "rsp",
    71      x86_64.RBP : "rbp",
    72      x86_64.RSI : "rsi",
    73      x86_64.RDI : "rdi",
    74      x86_64.R8  : "r8",
    75      x86_64.R9  : "r9",
    76      x86_64.R10 : "r10",
    77      x86_64.R11 : "r11",
    78      x86_64.R12 : "r12",
    79      x86_64.R13 : "r13",
    80      x86_64.R14 : "r14",
    81      x86_64.R15 : "r15",
    82  }
    83  
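        // ArchRegReserved marks registers that are reserved (the stack and frame
        // pointers) and are never handed out by the register allocator.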
    84  var ArchRegReserved = map[x86_64.Register64]bool {
    85      x86_64.RSP: true,
    86      x86_64.RBP: true,
    87  }
    88  
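        // IrSetArch binds rr to the physical register reg, preserving its pointer class
        // and index; it panics if reg is not one of the registers in ArchRegIds.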
    89  func IrSetArch(rr Reg, reg x86_64.Register64) Reg {
    90      if id, ok := ArchRegIds[reg]; !ok {
    91          panic("invalid physical register: " + reg.String())
    92      } else if rr.Ptr() {
    93          return mkreg(1, K_arch, id).Derive(rr.Index())
    94      } else {
    95          return mkreg(0, K_arch, id).Derive(rr.Index())
    96      }
    97  }
    98  
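        // Mem is an AMD64 memory operand with base M, index I, scale S and displacement D,
        // addressing M + I*S + D; an index of Rz means no index register.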
    99  type Mem struct {
   100      M Reg
   101      I Reg
   102      S uint8
   103      D int32
   104  }
   105  
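        // Ptr builds a base-plus-displacement operand d(r) with no index register.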
   106  func Ptr(r Reg, d int32) Mem {
   107      return Mem {
   108          M: r,
   109          I: Rz,
   110          S: 1,
   111          D: d,
   112      }
   113  }
   114  
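        // String renders the operand in AT&T d(base,index,scale) form, omitting zero
        // displacements and unused index or scale components.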
   115  func (self Mem) String() string {
   116      if self.I.Kind() == K_zero {
   117          if self.D == 0 {
   118              return fmt.Sprintf("(%s)", self.M)
   119          } else {
   120              return fmt.Sprintf("%d(%s)", self.D, self.M)
   121          }
   122      } else if self.S == 1 {
   123          if self.D == 0 {
   124              return fmt.Sprintf("(%s,%s)", self.M, self.I)
   125          } else {
   126              return fmt.Sprintf("%d(%s,%s)", self.D, self.M, self.I)
   127          }
   128      } else {
   129          if self.D == 0 {
   130              return fmt.Sprintf("(%s,%s,%d)", self.M, self.I, self.S)
   131          } else {
   132              return fmt.Sprintf("%d(%s,%s,%d)", self.D, self.M, self.I, self.S)
   133          }
   134      }
   135  }
   136  
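        // IrAMD64_MemOp is implemented by every instruction that embeds a memory operand;
        // MemOp returns a pointer to that operand so passes can rewrite it in place.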
   137  type IrAMD64_MemOp interface {
   138      MemOp() *Mem
   139  }
   140  
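        // The empty methods below are interface markers: irnode() tags a type as an AMD64
        // IR node, irimpure() as having side effects, irimmovable() as not reorderable,
        // and irterminator() as a basic-block terminator.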
   141  func (*IrAMD64_INT)    irnode() {}
   142  func (*IrAMD64_LEA)    irnode() {}
   143  func (*IrAMD64_NEG)    irnode() {}
   144  func (*IrAMD64_BSWAP)  irnode() {}
   145  func (*IrAMD64_MOVSLQ) irnode() {}
   146  
   147  func (*IrAMD64_MOV_abs)         irnode() {}
   148  func (*IrAMD64_MOV_ptr)         irnode() {}
   149  func (*IrAMD64_MOV_reg)         irnode() {}
   150  func (*IrAMD64_MOV_load)        irnode() {}
   151  func (*IrAMD64_MOV_store_r)     irnode() {}
   152  func (*IrAMD64_MOV_store_i)     irnode() {}
   153  func (*IrAMD64_MOV_store_p)     irnode() {}
   154  func (*IrAMD64_MOV_load_be)     irnode() {}
   155  func (*IrAMD64_MOV_store_be)    irnode() {}
   156  func (*IrAMD64_MOV_load_stack)  irnode() {}
   157  func (*IrAMD64_MOV_store_stack) irnode() {}
   158  
   159  func (*IrAMD64_BinOp_rr) irnode() {}
   160  func (*IrAMD64_BinOp_ri) irnode() {}
   161  func (*IrAMD64_BinOp_rm) irnode() {}
   162  
   163  func (*IrAMD64_BTSQ_rr) irnode() {}
   164  func (*IrAMD64_BTSQ_ri) irnode() {}
   165  
   166  func (*IrAMD64_CMPQ_rr) irnode() {}
   167  func (*IrAMD64_CMPQ_ri) irnode() {}
   168  func (*IrAMD64_CMPQ_rp) irnode() {}
   169  func (*IrAMD64_CMPQ_ir) irnode() {}
   170  func (*IrAMD64_CMPQ_pr) irnode() {}
   171  func (*IrAMD64_CMPQ_rm) irnode() {}
   172  func (*IrAMD64_CMPQ_mr) irnode() {}
   173  func (*IrAMD64_CMPQ_mi) irnode() {}
   174  func (*IrAMD64_CMPQ_mp) irnode() {}
   175  func (*IrAMD64_CMPQ_im) irnode() {}
   176  func (*IrAMD64_CMPQ_pm) irnode() {}
   177  
   178  func (*IrAMD64_CALL_reg)  irnode() {}
   179  func (*IrAMD64_CALL_mem)  irnode() {}
   180  func (*IrAMD64_CALL_gcwb) irnode() {}
   181  
   182  func (*IrAMD64_RET) irnode() {}
   183  func (*IrAMD64_JMP) irnode() {}
   184  func (*IrAMD64_JNC) irnode() {}
   185  
   186  func (*IrAMD64_Jcc_rr) irnode() {}
   187  func (*IrAMD64_Jcc_ri) irnode() {}
   188  func (*IrAMD64_Jcc_rp) irnode() {}
   189  func (*IrAMD64_Jcc_ir) irnode() {}
   190  func (*IrAMD64_Jcc_pr) irnode() {}
   191  func (*IrAMD64_Jcc_rm) irnode() {}
   192  func (*IrAMD64_Jcc_mr) irnode() {}
   193  func (*IrAMD64_Jcc_mi) irnode() {}
   194  func (*IrAMD64_Jcc_mp) irnode() {}
   195  func (*IrAMD64_Jcc_im) irnode() {}
   196  func (*IrAMD64_Jcc_pm) irnode() {}
   197  
   198  func (*IrAMD64_MOV_store_r)     irimpure() {}
   199  func (*IrAMD64_MOV_store_i)     irimpure() {}
   200  func (*IrAMD64_MOV_store_p)     irimpure() {}
   201  func (*IrAMD64_MOV_store_be)    irimpure() {}
   202  func (*IrAMD64_MOV_load_stack)  irimpure() {}
   203  func (*IrAMD64_MOV_store_stack) irimpure() {}
   204  
   205  func (*IrAMD64_CALL_reg)  irimpure() {}
   206  func (*IrAMD64_CALL_mem)  irimpure() {}
   207  func (*IrAMD64_CALL_gcwb) irimpure() {}
   208  
   209  func (*IrAMD64_MOV_load)        irimmovable() {}
   210  func (*IrAMD64_MOV_store_r)     irimmovable() {}
   211  func (*IrAMD64_MOV_store_i)     irimmovable() {}
   212  func (*IrAMD64_MOV_store_p)     irimmovable() {}
   213  func (*IrAMD64_MOV_load_be)     irimmovable() {}
   214  func (*IrAMD64_MOV_store_be)    irimmovable() {}
   215  func (*IrAMD64_MOV_load_stack)  irimmovable() {}
   216  func (*IrAMD64_MOV_store_stack) irimmovable() {}
   217  
   218  func (*IrAMD64_CMPQ_rm) irimmovable() {}
   219  func (*IrAMD64_CMPQ_mr) irimmovable() {}
   220  func (*IrAMD64_CMPQ_mi) irimmovable() {}
   221  func (*IrAMD64_CMPQ_mp) irimmovable() {}
   222  func (*IrAMD64_CMPQ_im) irimmovable() {}
   223  func (*IrAMD64_CMPQ_pm) irimmovable() {}
   224  
   225  func (*IrAMD64_CALL_reg)  irimmovable() {}
   226  func (*IrAMD64_CALL_mem)  irimmovable() {}
   227  func (*IrAMD64_CALL_gcwb) irimmovable() {}
   228  
   229  func (*IrAMD64_RET)    irterminator() {}
   230  func (*IrAMD64_JMP)    irterminator() {}
   231  func (*IrAMD64_JNC)    irterminator() {}
   232  
   233  func (*IrAMD64_Jcc_rr) irterminator() {}
   234  func (*IrAMD64_Jcc_ri) irterminator() {}
   235  func (*IrAMD64_Jcc_rp) irterminator() {}
   236  func (*IrAMD64_Jcc_ir) irterminator() {}
   237  func (*IrAMD64_Jcc_pr) irterminator() {}
   238  func (*IrAMD64_Jcc_rm) irterminator() {}
   239  func (*IrAMD64_Jcc_mr) irterminator() {}
   240  func (*IrAMD64_Jcc_mi) irterminator() {}
   241  func (*IrAMD64_Jcc_mp) irterminator() {}
   242  func (*IrAMD64_Jcc_im) irterminator() {}
   243  func (*IrAMD64_Jcc_pm) irterminator() {}
   244  
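        // IrAMD64_INT raises a software interrupt (int $I); int3 is the breakpoint trap.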
   245  type IrAMD64_INT struct {
   246      I uint8
   247  }
   248  
   249  func (self *IrAMD64_INT) Clone() IrNode {
   250      r := *self
   251      return &r
   252  }
   253  
   254  func (self *IrAMD64_INT) String() string {
   255      switch self.I {
   256          case 1  : return "int1"
   257          case 3  : return "int3"
   258          default : return fmt.Sprintf("int $%d  # %#x", self.I, self.I)
   259      }
   260  }
   261  
   262  type IrAMD64_LEA struct {
   263      R Reg
   264      M Mem
   265  }
   266  
   267  func (self *IrAMD64_LEA) MemOp() *Mem {
   268      return &self.M
   269  }
   270  
   271  func (self *IrAMD64_LEA) Clone() IrNode {
   272      r := *self
   273      return &r
   274  }
   275  
   276  func (self *IrAMD64_LEA) String() string {
   277      return fmt.Sprintf("leaq %s, %s", self.M, self.R)
   278  }
   279  
   280  func (self *IrAMD64_LEA) Usages() (r []*Reg) {
   281      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   282      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   283      return
   284  }
   285  
   286  func (self *IrAMD64_LEA) Definitions() []*Reg {
   287      return []*Reg { &self.R }
   288  }
   289  
   290  type IrAMD64_NEG struct {
   291      R Reg
   292      V Reg
   293  }
   294  
   295  func (self *IrAMD64_NEG) Clone() IrNode {
   296      r := *self
   297      return &r
   298  }
   299  
   300  func (self *IrAMD64_NEG) String() string {
   301      if self.R == self.V {
   302          return fmt.Sprintf("negq %s", self.R)
   303      } else {
   304          return fmt.Sprintf("movq %s, %s; negq %s", self.V, self.R, self.R)
   305      }
   306  }
   307  
   308  func (self *IrAMD64_NEG) Usages() []*Reg {
   309      return []*Reg { &self.V }
   310  }
   311  
   312  func (self *IrAMD64_NEG) Definitions() []*Reg {
   313      return []*Reg { &self.R }
   314  }
   315  
   316  type IrAMD64_BSWAP struct {
   317      R Reg
   318      V Reg
   319      N uint8
   320  }
   321  
   322  func (self *IrAMD64_BSWAP) Clone() IrNode {
   323      r := *self
   324      return &r
   325  }
   326  
   327  func (self *IrAMD64_BSWAP) String() string {
   328      if self.R == self.V {
   329          switch self.N {
   330              case 2  : return fmt.Sprintf("rolw $8, %s", self.R)
   331              case 4  : return fmt.Sprintf("bswapl %s", self.R)
   332              case 8  : return fmt.Sprintf("bswapq %s", self.R)
   333              default : panic("invalid bswap size")
   334          }
   335      } else {
   336          switch self.N {
   337              case 2  : return fmt.Sprintf("movq %s, %s; rolw $8, %s", self.V, self.R, self.R)
   338              case 4  : return fmt.Sprintf("movq %s, %s; bswapl %s", self.V, self.R, self.R)
   339              case 8  : return fmt.Sprintf("movq %s, %s; bswapq %s", self.V, self.R, self.R)
   340              default : panic("invalid bswap size")
   341          }
   342      }
   343  }
   344  
   345  func (self *IrAMD64_BSWAP) Usages() []*Reg {
   346      return []*Reg { &self.V }
   347  }
   348  
   349  func (self *IrAMD64_BSWAP) Definitions() []*Reg {
   350      return []*Reg { &self.R }
   351  }
   352  
   353  type IrAMD64_MOVSLQ struct {
   354      R Reg
   355      V Reg
   356  }
   357  
   358  func (self *IrAMD64_MOVSLQ) Clone() IrNode {
   359      r := *self
   360      return &r
   361  }
   362  
   363  func (self *IrAMD64_MOVSLQ) String() string {
   364      return fmt.Sprintf("movslq %s, %s", self.V, self.R)
   365  }
   366  
   367  func (self *IrAMD64_MOVSLQ) Usages() []*Reg {
   368      return []*Reg { &self.V }
   369  }
   370  
   371  func (self *IrAMD64_MOVSLQ) Definitions() []*Reg {
   372      return []*Reg { &self.R }
   373  }
   374  
   375  type IrAMD64_MOV_abs struct {
   376      R Reg
   377      V int64
   378  }
   379  
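        // IrArchConstInt builds the node that materializes the 64-bit integer constant v
        // into r (a movabsq).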
   380  func IrArchConstInt(r Reg, v int64) IrNode {
   381      return &IrAMD64_MOV_abs {
   382          R: r,
   383          V: v,
   384      }
   385  }
   386  
   387  func IrArchTryIntoConstInt(v IrNode) (Reg, int64, bool) {
   388      if p, ok := v.(*IrConstInt); ok {
   389          return p.R, p.V, true
   390      } else if p, ok := v.(*IrAMD64_MOV_abs); ok {
   391          return p.R, p.V, true
   392      } else {
   393          return 0, 0, false
   394      }
   395  }
   396  
   397  func (self *IrAMD64_MOV_abs) Clone() IrNode {
   398      r := *self
   399      return &r
   400  }
   401  
   402  func (self *IrAMD64_MOV_abs) String() string {
   403      return fmt.Sprintf("movabsq $%d, %s  # %#x", self.V, self.R, self.V)
   404  }
   405  
   406  func (self *IrAMD64_MOV_abs) Definitions() []*Reg {
   407      return []*Reg { &self.R }
   408  }
   409  
   410  type IrAMD64_MOV_ptr struct {
   411      R Reg
   412      P unsafe.Pointer
   413  }
   414  
   415  func IrArchConstPtr(r Reg, p unsafe.Pointer) IrNode {
   416      return &IrAMD64_MOV_ptr {
   417          R: r,
   418          P: p,
   419      }
   420  }
   421  
   422  func IrArchTryIntoConstPtr(v IrNode) (Reg, unsafe.Pointer, bool) {
   423      if p, ok := v.(*IrConstPtr); ok {
   424          return p.R, p.P, true
   425      } else if p, ok := v.(*IrAMD64_MOV_ptr); ok {
   426          return p.R, p.P, true
   427      } else {
   428          return 0, nil, false
   429      }
   430  }
   431  
   432  func (self *IrAMD64_MOV_ptr) Clone() IrNode {
   433      r := *self
   434      return &r
   435  }
   436  
   437  func (self *IrAMD64_MOV_ptr) String() string {
   438      return fmt.Sprintf("movabsq $%p, %s  # %s", self.P, self.R, rt.FuncName(self.P))
   439  }
   440  
   441  func (self *IrAMD64_MOV_ptr) Definitions() []*Reg {
   442      return []*Reg { &self.R }
   443  }
   444  
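        // IrArchZero builds a node that zeroes r, as a pointer constant for pointer
        // registers and an integer constant otherwise.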
   445  func IrArchZero(r Reg) IrNode {
   446      if r.Ptr() {
   447          return &IrAMD64_MOV_ptr { R: r }
   448      } else {
   449          return &IrAMD64_MOV_abs { R: r }
   450      }
   451  }
   452  
   453  type IrAMD64_MOV_reg struct {
   454      R Reg
   455      V Reg
   456  }
   457  
   458  func IrArchCopy(r Reg, v Reg) IrNode {
   459      return &IrAMD64_MOV_reg { R: r, V: v }
   460  }
   461  
   462  func IrArchTryIntoCopy(v IrNode) (Reg, Reg, bool) {
   463      if p, ok := v.(*IrAMD64_MOV_reg); ok {
   464          return p.R, p.V, true
   465      } else {
   466          return IrTryIntoCopy(v)
   467      }
   468  }
   469  
   470  func (self *IrAMD64_MOV_reg) Clone() IrNode {
   471      r := *self
   472      return &r
   473  }
   474  
   475  func (self *IrAMD64_MOV_reg) String() string {
   476      return fmt.Sprintf("movq %s, %s", self.V, self.R)
   477  }
   478  
   479  func (self *IrAMD64_MOV_reg) Usages() []*Reg {
   480      return []*Reg { &self.V }
   481  }
   482  
   483  func (self *IrAMD64_MOV_reg) Definitions() []*Reg {
   484      return []*Reg { &self.R }
   485  }
   486  
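        // IrAMD64_MOV_load loads N bytes (1, 2, 4, 8 or 16) from M into R; 1- and 2-byte
        // loads are zero-extended.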
   487  type IrAMD64_MOV_load struct {
   488      R Reg
   489      M Mem
   490      N uint8
   491  }
   492  
   493  func (self *IrAMD64_MOV_load) MemOp() *Mem {
   494      return &self.M
   495  }
   496  
   497  func (self *IrAMD64_MOV_load) Clone() IrNode {
   498      r := *self
   499      return &r
   500  }
   501  
   502  func (self *IrAMD64_MOV_load) String() string {
   503      switch self.N {
   504          case 1  : return fmt.Sprintf("movzbq %s, %s", self.M, self.R)
   505          case 2  : return fmt.Sprintf("movzwq %s, %s", self.M, self.R)
   506          case 4  : return fmt.Sprintf("movl %s, %s", self.M, self.R)
   507          case 8  : return fmt.Sprintf("movq %s, %s", self.M, self.R)
   508          case 16 : return fmt.Sprintf("movdqu %s, %s", self.M, self.R)
   509          default : panic("invalid load size")
   510      }
   511  }
   512  
   513  func (self *IrAMD64_MOV_load) Usages() (r []*Reg) {
   514      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   515      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   516      return
   517  }
   518  
   519  func (self *IrAMD64_MOV_load) Definitions() []*Reg {
   520      return []*Reg { &self.R }
   521  }
   522  
   523  type IrAMD64_MOV_store_r struct {
   524      R Reg
   525      M Mem
   526      N uint8
   527  }
   528  
   529  func (self *IrAMD64_MOV_store_r) MemOp() *Mem {
   530      return &self.M
   531  }
   532  
   533  func (self *IrAMD64_MOV_store_r) Clone() IrNode {
   534      r := *self
   535      return &r
   536  }
   537  
   538  func (self *IrAMD64_MOV_store_r) String() string {
   539      return fmt.Sprintf("mov%c %s, %s", memsizec(self.N), self.R, self.M)
   540  }
   541  
   542  func (self *IrAMD64_MOV_store_r) Usages() (r []*Reg) {
   543      r = []*Reg { &self.R }
   544      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   545      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   546      return
   547  }
   548  
   549  type IrAMD64_MOV_store_i struct {
   550      V int32
   551      M Mem
   552      N uint8
   553  }
   554  
   555  func (self *IrAMD64_MOV_store_i) MemOp() *Mem {
   556      return &self.M
   557  }
   558  
   559  func (self *IrAMD64_MOV_store_i) Clone() IrNode {
   560      r := *self
   561      return &r
   562  }
   563  
   564  func (self *IrAMD64_MOV_store_i) String() string {
   565      return fmt.Sprintf("mov%c $%d, %s  # %#0*x", memsizec(self.N), self.V, self.M, self.N * 2, self.V)
   566  }
   567  
   568  func (self *IrAMD64_MOV_store_i) Usages() (r []*Reg) {
   569      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   570      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   571      return
   572  }
   573  
   574  type IrAMD64_MOV_store_p struct {
   575      P unsafe.Pointer
   576      M Mem
   577  }
   578  
   579  func (self *IrAMD64_MOV_store_p) MemOp() *Mem {
   580      return &self.M
   581  }
   582  
   583  func (self *IrAMD64_MOV_store_p) Clone() IrNode {
   584      r := *self
   585      return &r
   586  }
   587  
   588  func (self *IrAMD64_MOV_store_p) String() string {
   589      return fmt.Sprintf("movq $%p, %s  # %s", self.P, self.M, rt.FuncName(self.P))
   590  }
   591  
   592  func (self *IrAMD64_MOV_store_p) Usages() (r []*Reg) {
   593      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   594      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   595      return
   596  }
   597  
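        // IrAMD64_MOV_load_be performs a byte-swapped (big-endian) load of N bytes from M
        // into R using the movbe instruction.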
   598  type IrAMD64_MOV_load_be struct {
   599      R Reg
   600      M Mem
   601      N uint8
   602  }
   603  
   604  func (self *IrAMD64_MOV_load_be) MemOp() *Mem {
   605      return &self.M
   606  }
   607  
   608  func (self *IrAMD64_MOV_load_be) Clone() IrNode {
   609      r := *self
   610      return &r
   611  }
   612  
   613  func (self *IrAMD64_MOV_load_be) String() string {
   614      switch self.N {
   615          case 2  : return fmt.Sprintf("movbew %s, %s; movzwl %s, %s", self.M, self.R, self.R, self.R)
   616          case 4  : return fmt.Sprintf("movbel %s, %s", self.M, self.R)
   617          case 8  : return fmt.Sprintf("movbeq %s, %s", self.M, self.R)
   618          default : panic("invalid load size")
   619      }
   620  }
   621  
   622  func (self *IrAMD64_MOV_load_be) Usages() (r []*Reg) {
   623      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   624      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   625      return
   626  }
   627  
   628  func (self *IrAMD64_MOV_load_be) Definitions() []*Reg {
   629      return []*Reg { &self.R }
   630  }
   631  
   632  type IrAMD64_MOV_store_be struct {
   633      R Reg
   634      M Mem
   635      N uint8
   636  }
   637  
   638  func (self *IrAMD64_MOV_store_be) MemOp() *Mem {
   639      return &self.M
   640  }
   641  
   642  func (self *IrAMD64_MOV_store_be) Clone() IrNode {
   643      r := *self
   644      return &r
   645  }
   646  
   647  func (self *IrAMD64_MOV_store_be) String() string {
   648      switch self.N {
   649          case 2  : return fmt.Sprintf("movbew %s, %s", self.R, self.M)
   650          case 4  : return fmt.Sprintf("movbel %s, %s", self.R, self.M)
   651          case 8  : return fmt.Sprintf("movbeq %s, %s", self.R, self.M)
   652          default : panic("invalid store size")
   653      }
   654  }
   655  
   656  func (self *IrAMD64_MOV_store_be) Usages() (r []*Reg) {
   657      r = []*Reg { &self.R }
   658      if self.M.M.Kind() != K_zero { r = append(r, &self.M.M) }
   659      if self.M.I.Kind() != K_zero { r = append(r, &self.M.I) }
   660      return
   661  }
   662  
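        // IrSlotKind identifies the stack area a slot belongs to: the incoming argument
        // area, the outgoing call-argument area, or function locals.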
   663  type (
   664      IrSlotKind uint8
   665  )
   666  
   667  const (
   668      IrSlotArgs IrSlotKind = iota
   669      IrSlotCall
   670      IrSlotLocal
   671  )
   672  
   673  func (self IrSlotKind) String() string {
   674      switch self {
   675          case IrSlotArgs  : return "args"
   676          case IrSlotCall  : return "call"
   677          case IrSlotLocal : return "local"
   678          default          : return "???"
   679      }
   680  }
   681  
   682  type IrAMD64_MOV_load_stack struct {
   683      R Reg
   684      S uintptr
   685      K IrSlotKind
   686  }
   687  
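        // IrArchLoadStack builds a load of reg from the stack slot at offset offs within
        // the kind area.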
   688  func IrArchLoadStack(reg Reg, offs uintptr, kind IrSlotKind) IrNode {
   689      return &IrAMD64_MOV_load_stack {
   690          R: reg,
   691          S: offs,
   692          K: kind,
   693      }
   694  }
   695  
   696  func (self *IrAMD64_MOV_load_stack) Clone() IrNode {
   697      r := *self
   698      return &r
   699  }
   700  
   701  func (self *IrAMD64_MOV_load_stack) String() string {
   702      return fmt.Sprintf("movq %s+%d<>(FP), %s", self.K, self.S, self.R)
   703  }
   704  
   705  func (self *IrAMD64_MOV_load_stack) Definitions() (r []*Reg) {
   706      return []*Reg { &self.R }
   707  }
   708  
   709  type IrAMD64_MOV_store_stack struct {
   710      R Reg
   711      S uintptr
   712      K IrSlotKind
   713  }
   714  
   715  func IrArchStoreStack(reg Reg, offs uintptr, kind IrSlotKind) IrNode {
   716      return &IrAMD64_MOV_store_stack {
   717          R: reg,
   718          S: offs,
   719          K: kind,
   720      }
   721  }
   722  
   723  func (self *IrAMD64_MOV_store_stack) Clone() IrNode {
   724      r := *self
   725      return &r
   726  }
   727  
   728  func (self *IrAMD64_MOV_store_stack) String() string {
   729      return fmt.Sprintf("movq %s, %s+%d<>(FP)", self.R, self.K, self.S)
   730  }
   731  
   732  func (self *IrAMD64_MOV_store_stack) Usages() (r []*Reg) {
   733      return []*Reg { &self.R }
   734  }
   735  
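        // IrAMD64_BinOp enumerates the two-operand integer ALU operations; IrAMD64_CmpOp
        // enumerates comparison conditions, printed as AT&T condition-code suffixes.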
   736  type (
   737      IrAMD64_BinOp uint8
   738      IrAMD64_CmpOp uint8
   739  )
   740  
   741  const (
   742      IrAMD64_BinAdd IrAMD64_BinOp = iota
   743      IrAMD64_BinSub
   744      IrAMD64_BinMul
   745      IrAMD64_BinAnd
   746      IrAMD64_BinOr
   747      IrAMD64_BinXor
   748      IrAMD64_BinShr
   749  )
   750  
   751  const (
   752      IrAMD64_CmpEq IrAMD64_CmpOp = iota
   753      IrAMD64_CmpNe
   754      IrAMD64_CmpLt
   755      IrAMD64_CmpGe
   756      IrAMD64_CmpLtu
   757      IrAMD64_CmpGeu
   758  )
   759  
   760  func (self IrAMD64_BinOp) String() string {
   761      switch self {
   762          case IrAMD64_BinAdd : return "addq"
   763          case IrAMD64_BinSub : return "subq"
   764          case IrAMD64_BinMul : return "imulq"
   765          case IrAMD64_BinAnd : return "andq"
   766          case IrAMD64_BinOr  : return "orq"
   767          case IrAMD64_BinXor : return "xorq"
   768          case IrAMD64_BinShr : return "shrq"
   769          default             : panic("unreachable")
   770      }
   771  }
   772  
   773  func (self IrAMD64_BinOp) IsAdditive() bool {
   774      switch self {
   775          case IrAMD64_BinAdd : fallthrough
   776          case IrAMD64_BinSub : return true
   777          default             : return false
   778      }
   779  }
   780  
   781  func (self IrAMD64_BinOp) ScaleFactor() int32 {
   782      switch self {
   783          case IrAMD64_BinAdd : return 1
   784          case IrAMD64_BinSub : return -1
   785          default             : panic("not an additive operator: " + self.String())
   786      }
   787  }
   788  
   789  func (self IrAMD64_CmpOp) String() string {
   790      switch self {
   791          case IrAMD64_CmpEq  : return "e"
   792          case IrAMD64_CmpNe  : return "ne"
   793          case IrAMD64_CmpLt  : return "l"
   794          case IrAMD64_CmpGe  : return "ge"
   795          case IrAMD64_CmpLtu : return "b"
   796          case IrAMD64_CmpGeu : return "ae"
   797          default             : panic("unreachable")
   798      }
   799  }
   800  
   801  func (self IrAMD64_CmpOp) Negated() IrAMD64_CmpOp {
   802      switch self {
   803          case IrAMD64_CmpEq  : return IrAMD64_CmpNe
   804          case IrAMD64_CmpNe  : return IrAMD64_CmpEq
   805          case IrAMD64_CmpLt  : return IrAMD64_CmpGe
   806          case IrAMD64_CmpGe  : return IrAMD64_CmpLt
   807          case IrAMD64_CmpLtu : return IrAMD64_CmpGeu
   808          case IrAMD64_CmpGeu : return IrAMD64_CmpLtu
   809          default             : panic("unreachable")
   810      }
   811  }
   812  
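        // IrAMD64_BinOp_rr, _ri and _rm compute R = X Op Y with the second operand in a
        // register, a 32-bit immediate, or memory respectively.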
   813  type IrAMD64_BinOp_rr struct {
   814      R  Reg
   815      X  Reg
   816      Y  Reg
   817      Op IrAMD64_BinOp
   818  }
   819  
   820  func (self *IrAMD64_BinOp_rr) Clone() IrNode {
   821      r := *self
   822      return &r
   823  }
   824  
   825  func (self *IrAMD64_BinOp_rr) String() string {
   826      if self.R == self.X {
   827          return fmt.Sprintf("%s %s, %s", self.Op, self.Y, self.X)
   828      } else {
   829          return fmt.Sprintf("movq %s, %s; %s %s, %s", self.X, self.R, self.Op, self.Y, self.R)
   830      }
   831  }
   832  
   833  func (self *IrAMD64_BinOp_rr) Usages() []*Reg {
   834      return []*Reg { &self.X, &self.Y }
   835  }
   836  
   837  func (self *IrAMD64_BinOp_rr) Definitions() []*Reg {
   838      return []*Reg { &self.R }
   839  }
   840  
   841  type IrAMD64_BinOp_ri struct {
   842      R  Reg
   843      X  Reg
   844      Y  int32
   845      Op IrAMD64_BinOp
   846  }
   847  
   848  func (self *IrAMD64_BinOp_ri) Clone() IrNode {
   849      r := *self
   850      return &r
   851  }
   852  
   853  func (self *IrAMD64_BinOp_ri) String() string {
   854      if self.Op == IrAMD64_BinMul {
   855          return fmt.Sprintf("imulq $%d, %s, %s  # %#x", self.Y, self.X, self.R, self.Y)
   856      } else if self.R == self.X {
   857          return fmt.Sprintf("%s $%d, %s  # %#x", self.Op, self.Y, self.X, self.Y)
   858      } else {
   859          return fmt.Sprintf("movq %s, %s; %s $%d, %s  # %#x", self.X, self.R, self.Op, self.Y, self.R, self.Y)
   860      }
   861  }
   862  
   863  func (self *IrAMD64_BinOp_ri) Usages() []*Reg {
   864      return []*Reg { &self.X }
   865  }
   866  
   867  func (self *IrAMD64_BinOp_ri) Definitions() []*Reg {
   868      return []*Reg { &self.R }
   869  }
   870  
   871  type IrAMD64_BinOp_rm struct {
   872      R  Reg
   873      X  Reg
   874      Y  Mem
   875      Op IrAMD64_BinOp
   876  }
   877  
   878  func (self *IrAMD64_BinOp_rm) MemOp() *Mem {
   879      return &self.Y
   880  }
   881  
   882  func (self *IrAMD64_BinOp_rm) Clone() IrNode {
   883      r := *self
   884      return &r
   885  }
   886  
   887  func (self *IrAMD64_BinOp_rm) String() string {
   888      if self.R == self.X {
   889          return fmt.Sprintf("%s %s, %s", self.Op, self.Y, self.X)
   890      } else {
   891          return fmt.Sprintf("movq %s, %s; %s %s, %s", self.X, self.R, self.Op, self.Y, self.R)
   892      }
   893  }
   894  
   895  func (self *IrAMD64_BinOp_rm) Usages() []*Reg {
   896      if self.Y.I == Rz {
   897          return []*Reg { &self.X, &self.Y.M }
   898      } else {
   899          return []*Reg { &self.X, &self.Y.M, &self.Y.I }
   900      }
   901  }
   902  
   903  func (self *IrAMD64_BinOp_rm) Definitions() []*Reg {
   904      return []*Reg { &self.R }
   905  }
   906  
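        // IrAMD64_BTSQ_rr and _ri perform bit test-and-set: S receives X with bit Y set,
        // and T, unless it is the zero register, receives the previous value of that bit.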
   907  type IrAMD64_BTSQ_rr struct {
   908      T Reg
   909      S Reg
   910      X Reg
   911      Y Reg
   912  }
   913  
   914  func (self *IrAMD64_BTSQ_rr) Clone() IrNode {
   915      r := *self
   916      return &r
   917  }
   918  
   919  func (self *IrAMD64_BTSQ_rr) String() string {
   920      if self.T.Kind() == K_zero {
   921          if self.S == self.X {
   922              return fmt.Sprintf("btsq %s, %s", self.Y, self.X)
   923          } else {
   924              return fmt.Sprintf("movq %s, %s; btsq %s, %s", self.X, self.S, self.Y, self.S)
   925          }
   926      } else {
   927          if self.S == self.X {
   928              return fmt.Sprintf("btsq %s, %s; setc %s", self.Y, self.X, self.T)
   929          } else {
   930              return fmt.Sprintf("movq %s, %s; btsq %s, %s; setc %s", self.X, self.S, self.Y, self.S, self.T)
   931          }
   932      }
   933  }
   934  
   935  func (self *IrAMD64_BTSQ_rr) Usages() []*Reg {
   936      return []*Reg { &self.X, &self.Y }
   937  }
   938  
   939  func (self *IrAMD64_BTSQ_rr) Definitions() []*Reg {
   940      return []*Reg { &self.T, &self.S }
   941  }
   942  
   943  type IrAMD64_BTSQ_ri struct {
   944      T Reg
   945      S Reg
   946      X Reg
   947      Y uint8
   948  }
   949  
   950  func (self *IrAMD64_BTSQ_ri) Clone() IrNode {
   951      r := *self
   952      return &r
   953  }
   954  
   955  func (self *IrAMD64_BTSQ_ri) String() string {
   956      if self.T.Kind() == K_zero {
   957          if self.S == self.X {
   958              return fmt.Sprintf("btsq $%d, %s", self.Y, self.X)
   959          } else {
   960              return fmt.Sprintf("movq %s, %s; btsq $%d, %s", self.X, self.S, self.Y, self.S)
   961          }
   962      } else {
   963          if self.S == self.X {
   964              return fmt.Sprintf("btsq $%d, %s; setc %s", self.Y, self.X, self.T)
   965          } else {
   966              return fmt.Sprintf("movq %s, %s; btsq $%d, %s; setc %s", self.X, self.S, self.Y, self.S, self.T)
   967          }
   968      }
   969  }
   970  
   971  func (self *IrAMD64_BTSQ_ri) Usages() []*Reg {
   972      return []*Reg { &self.X }
   973  }
   974  
   975  func (self *IrAMD64_BTSQ_ri) Definitions() []*Reg {
   976      return []*Reg { &self.T, &self.S }
   977  }
   978  
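        // The IrAMD64_CMPQ_* nodes compare X with Y and set R to the Op condition via
        // setcc; the suffix names the operand kinds (r register, i 32-bit immediate,
        // p pointer constant, m memory).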
   979  type IrAMD64_CMPQ_rr struct {
   980      R  Reg
   981      X  Reg
   982      Y  Reg
   983      Op IrAMD64_CmpOp
   984  }
   985  
   986  func (self *IrAMD64_CMPQ_rr) Clone() IrNode {
   987      r := *self
   988      return &r
   989  }
   990  
   991  func (self *IrAMD64_CMPQ_rr) String() string {
   992      if self.R == Rz {
   993          return fmt.Sprintf("cmpq %s, %s", self.X, self.Y)
   994      } else {
   995          return fmt.Sprintf("cmpq %s, %s; set%s %s", self.X, self.Y, self.Op, self.R)
   996      }
   997  }
   998  
   999  func (self *IrAMD64_CMPQ_rr) Usages() []*Reg {
  1000      return []*Reg { &self.X, &self.Y }
  1001  }
  1002  
  1003  func (self *IrAMD64_CMPQ_rr) Definitions() []*Reg {
  1004      return []*Reg { &self.R }
  1005  }
  1006  
  1007  type IrAMD64_CMPQ_ri struct {
  1008      R  Reg
  1009      X  Reg
  1010      Y  int32
  1011      Op IrAMD64_CmpOp
  1012  }
  1013  
  1014  func (self *IrAMD64_CMPQ_ri) Clone() IrNode {
  1015      r := *self
  1016      return &r
  1017  }
  1018  
  1019  func (self *IrAMD64_CMPQ_ri) String() string {
  1020      return fmt.Sprintf("cmpq %s, $%d; set%s %s  # %#x", self.X, self.Y, self.Op, self.R, self.Y)
  1021  }
  1022  
  1023  func (self *IrAMD64_CMPQ_ri) Usages() []*Reg {
  1024      return []*Reg { &self.X }
  1025  }
  1026  
  1027  func (self *IrAMD64_CMPQ_ri) Definitions() []*Reg {
  1028      return []*Reg { &self.R }
  1029  }
  1030  
  1031  type IrAMD64_CMPQ_rp struct {
  1032      R  Reg
  1033      X  Reg
  1034      Y  unsafe.Pointer
  1035      Op IrAMD64_CmpOp
  1036  }
  1037  
  1038  func (self *IrAMD64_CMPQ_rp) Clone() IrNode {
  1039      r := *self
  1040      return &r
  1041  }
  1042  
  1043  func (self *IrAMD64_CMPQ_rp) String() string {
  1044      return fmt.Sprintf("cmpq %s, $%p; set%s %s", self.X, self.Y, self.Op, self.R)
  1045  }
  1046  
  1047  func (self *IrAMD64_CMPQ_rp) Usages() []*Reg {
  1048      return []*Reg { &self.X }
  1049  }
  1050  
  1051  func (self *IrAMD64_CMPQ_rp) Definitions() []*Reg {
  1052      return []*Reg { &self.R }
  1053  }
  1054  
  1055  type IrAMD64_CMPQ_ir struct {
  1056      R  Reg
  1057      X  int32
  1058      Y  Reg
  1059      Op IrAMD64_CmpOp
  1060  }
  1061  
  1062  func (self *IrAMD64_CMPQ_ir) Clone() IrNode {
  1063      r := *self
  1064      return &r
  1065  }
  1066  
  1067  func (self *IrAMD64_CMPQ_ir) String() string {
  1068      return fmt.Sprintf("cmpq $%d, %s; set%s %s  # %#x", self.X, self.Y, self.Op, self.R, self.X)
  1069  }
  1070  
  1071  func (self *IrAMD64_CMPQ_ir) Usages() []*Reg {
  1072      return []*Reg { &self.Y }
  1073  }
  1074  
  1075  func (self *IrAMD64_CMPQ_ir) Definitions() []*Reg {
  1076      return []*Reg { &self.R }
  1077  }
  1078  
  1079  type IrAMD64_CMPQ_pr struct {
  1080      R  Reg
  1081      X  unsafe.Pointer
  1082      Y  Reg
  1083      Op IrAMD64_CmpOp
  1084  }
  1085  
  1086  func (self *IrAMD64_CMPQ_pr) Clone() IrNode {
  1087      r := *self
  1088      return &r
  1089  }
  1090  
  1091  func (self *IrAMD64_CMPQ_pr) String() string {
  1092      return fmt.Sprintf("cmpq $%p, %s; set%s %s", self.X, self.Y, self.Op, self.R)
  1093  }
  1094  
  1095  func (self *IrAMD64_CMPQ_pr) Usages() []*Reg {
  1096      return []*Reg { &self.Y }
  1097  }
  1098  
  1099  func (self *IrAMD64_CMPQ_pr) Definitions() []*Reg {
  1100      return []*Reg { &self.R }
  1101  }
  1102  
  1103  type IrAMD64_CMPQ_rm struct {
  1104      R  Reg
  1105      X  Reg
  1106      Y  Mem
  1107      N  uint8
  1108      Op IrAMD64_CmpOp
  1109  }
  1110  
  1111  func (self *IrAMD64_CMPQ_rm) MemOp() *Mem {
  1112      return &self.Y
  1113  }
  1114  
  1115  func (self *IrAMD64_CMPQ_rm) Clone() IrNode {
  1116      r := *self
  1117      return &r
  1118  }
  1119  
  1120  func (self *IrAMD64_CMPQ_rm) String() string {
  1121      return fmt.Sprintf(
  1122          "cmp%c %s, %s; set%s %s",
  1123          memsizec(self.N),
  1124          self.X,
  1125          self.Y,
  1126          self.Op,
  1127          self.R,
  1128      )
  1129  }
  1130  
  1131  func (self *IrAMD64_CMPQ_rm) Usages() []*Reg {
  1132      if self.Y.I == Rz {
  1133          return []*Reg { &self.X, &self.Y.M }
  1134      } else {
  1135          return []*Reg { &self.X, &self.Y.M, &self.Y.I }
  1136      }
  1137  }
  1138  
  1139  func (self *IrAMD64_CMPQ_rm) Definitions() []*Reg {
  1140      return []*Reg { &self.R }
  1141  }
  1142  
  1143  type IrAMD64_CMPQ_mr struct {
  1144      R  Reg
  1145      X  Mem
  1146      Y  Reg
  1147      N  uint8
  1148      Op IrAMD64_CmpOp
  1149  }
  1150  
  1151  func (self *IrAMD64_CMPQ_mr) MemOp() *Mem {
  1152      return &self.X
  1153  }
  1154  
  1155  func (self *IrAMD64_CMPQ_mr) Clone() IrNode {
  1156      r := *self
  1157      return &r
  1158  }
  1159  
  1160  func (self *IrAMD64_CMPQ_mr) String() string {
  1161      return fmt.Sprintf(
  1162          "cmp%c %s, %s; set%s %s",
  1163          memsizec(self.N),
  1164          self.X,
  1165          self.Y,
  1166          self.Op,
  1167          self.R,
  1168      )
  1169  }
  1170  
  1171  func (self *IrAMD64_CMPQ_mr) Usages() []*Reg {
  1172      if self.X.I == Rz {
  1173          return []*Reg { &self.X.M, &self.Y }
  1174      } else {
  1175          return []*Reg { &self.X.M, &self.X.I, &self.Y }
  1176      }
  1177  }
  1178  
  1179  func (self *IrAMD64_CMPQ_mr) Definitions() []*Reg {
  1180      return []*Reg { &self.R }
  1181  }
  1182  
  1183  type IrAMD64_CMPQ_mi struct {
  1184      R  Reg
  1185      X  Mem
  1186      Y  int32
  1187      N  uint8
  1188      Op IrAMD64_CmpOp
  1189  }
  1190  
  1191  func (self *IrAMD64_CMPQ_mi) MemOp() *Mem {
  1192      return &self.X
  1193  }
  1194  
  1195  func (self *IrAMD64_CMPQ_mi) Clone() IrNode {
  1196      r := *self
  1197      return &r
  1198  }
  1199  
  1200  func (self *IrAMD64_CMPQ_mi) String() string {
  1201      return fmt.Sprintf(
  1202          "cmp%c %s, $%d; set%s %s  # %#x",
  1203          memsizec(self.N),
  1204          self.X,
  1205          self.Y,
  1206          self.Op,
  1207          self.R,
  1208          self.Y,
  1209      )
  1210  }
  1211  
  1212  func (self *IrAMD64_CMPQ_mi) Usages() []*Reg {
  1213      if self.X.I == Rz {
  1214          return []*Reg { &self.X.M }
  1215      } else {
  1216          return []*Reg { &self.X.M, &self.X.I }
  1217      }
  1218  }
  1219  
  1220  func (self *IrAMD64_CMPQ_mi) Definitions() []*Reg {
  1221      return []*Reg { &self.R }
  1222  }
  1223  
  1224  type IrAMD64_CMPQ_mp struct {
  1225      R  Reg
  1226      X  Mem
  1227      Y  unsafe.Pointer
  1228      Op IrAMD64_CmpOp
  1229  }
  1230  
  1231  func (self *IrAMD64_CMPQ_mp) MemOp() *Mem {
  1232      return &self.X
  1233  }
  1234  
  1235  func (self *IrAMD64_CMPQ_mp) Clone() IrNode {
  1236      r := *self
  1237      return &r
  1238  }
  1239  
  1240  func (self *IrAMD64_CMPQ_mp) String() string {
  1241      return fmt.Sprintf("cmpq %s, $%p; set%s %s", self.X, self.Y, self.Op, self.R)
  1242  }
  1243  
  1244  func (self *IrAMD64_CMPQ_mp) Usages() []*Reg {
  1245      if self.X.I == Rz {
  1246          return []*Reg { &self.X.M }
  1247      } else {
  1248          return []*Reg { &self.X.M, &self.X.I }
  1249      }
  1250  }
  1251  
  1252  func (self *IrAMD64_CMPQ_mp) Definitions() []*Reg {
  1253      return []*Reg { &self.R }
  1254  }
  1255  
  1256  type IrAMD64_CMPQ_im struct {
  1257      R  Reg
  1258      X  int32
  1259      Y  Mem
  1260      N  uint8
  1261      Op IrAMD64_CmpOp
  1262  }
  1263  
  1264  func (self *IrAMD64_CMPQ_im) MemOp() *Mem {
  1265      return &self.Y
  1266  }
  1267  
  1268  func (self *IrAMD64_CMPQ_im) Clone() IrNode {
  1269      r := *self
  1270      return &r
  1271  }
  1272  
  1273  func (self *IrAMD64_CMPQ_im) String() string {
  1274      return fmt.Sprintf(
  1275          "cmp%c $%d, %s; set%s %s  # %#x",
  1276          memsizec(self.N),
  1277          self.X,
  1278          self.Y,
  1279          self.Op,
  1280          self.R,
  1281          self.X,
  1282      )
  1283  }
  1284  
  1285  func (self *IrAMD64_CMPQ_im) Usages() []*Reg {
  1286      if self.Y.I == Rz {
  1287          return []*Reg { &self.Y.M }
  1288      } else {
  1289          return []*Reg { &self.Y.M, &self.Y.I }
  1290      }
  1291  }
  1292  
  1293  func (self *IrAMD64_CMPQ_im) Definitions() []*Reg {
  1294      return []*Reg { &self.R }
  1295  }
  1296  
  1297  type IrAMD64_CMPQ_pm struct {
  1298      R  Reg
  1299      X  unsafe.Pointer
  1300      Y  Mem
  1301      Op IrAMD64_CmpOp
  1302  }
  1303  
  1304  func (self *IrAMD64_CMPQ_pm) MemOp() *Mem {
  1305      return &self.Y
  1306  }
  1307  
  1308  func (self *IrAMD64_CMPQ_pm) Clone() IrNode {
  1309      r := *self
  1310      return &r
  1311  }
  1312  
  1313  func (self *IrAMD64_CMPQ_pm) String() string {
  1314      return fmt.Sprintf("cmpq $%p, %s; set%s %s", self.X, self.Y, self.Op, self.R)
  1315  }
  1316  
  1317  func (self *IrAMD64_CMPQ_pm) Usages() []*Reg {
  1318      if self.Y.I == Rz {
  1319          return []*Reg { &self.Y.M }
  1320      } else {
  1321          return []*Reg { &self.Y.M, &self.Y.I }
  1322      }
  1323  }
  1324  
  1325  func (self *IrAMD64_CMPQ_pm) Definitions() []*Reg {
  1326      return []*Reg { &self.R }
  1327  }
  1328  
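        // The IrAMD64_CALL_* nodes are calls through a register, through a memory operand,
        // and the GC write-barrier helper; In and Out list argument and result registers,
        // and Clob lists the registers clobbered by the call.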
  1329  type IrAMD64_CALL_reg struct {
  1330      Fn   Reg
  1331      In   []Reg
  1332      Out  []Reg
  1333      Clob []Reg
  1334  }
  1335  
  1336  func (self *IrAMD64_CALL_reg) Clone() IrNode {
  1337      r := new(IrAMD64_CALL_reg)
  1338      r.Fn = self.Fn
  1339      r.In = make([]Reg, len(self.In))
  1340      r.Out = make([]Reg, len(self.Out))
  1341      r.Clob = make([]Reg, len(self.Clob))
  1342      copy(r.In, self.In)
  1343      copy(r.Out, self.Out)
  1344      copy(r.Clob, self.Clob)
  1345      return r
  1346  }
  1347  
  1348  func (self *IrAMD64_CALL_reg) String() string {
  1349      return fmt.Sprintf(
  1350          "rcall *%s, {%s}, {%s}",
  1351          self.Fn,
  1352          regslicerepr(self.In),
  1353          regslicerepr(self.Out),
  1354      )
  1355  }
  1356  
  1357  func (self *IrAMD64_CALL_reg) Usages() []*Reg {
  1358      return append(regsliceref(self.In), &self.Fn)
  1359  }
  1360  
  1361  func (self *IrAMD64_CALL_reg) Definitions() []*Reg {
  1362      return append(regsliceref(self.Out), regsliceref(self.Clob)...)
  1363  }
  1364  
  1365  type IrAMD64_CALL_mem struct {
  1366      Fn   Mem
  1367      In   []Reg
  1368      Out  []Reg
  1369      Clob []Reg
  1370  }
  1371  
  1372  func (self *IrAMD64_CALL_mem) MemOp() *Mem {
  1373      return &self.Fn
  1374  }
  1375  
  1376  func (self *IrAMD64_CALL_mem) Clone() IrNode {
  1377      r := new(IrAMD64_CALL_mem)
  1378      r.Fn = self.Fn
  1379      r.In = make([]Reg, len(self.In))
  1380      r.Out = make([]Reg, len(self.Out))
  1381      r.Clob = make([]Reg, len(self.Clob))
  1382      copy(r.In, self.In)
  1383      copy(r.Out, self.Out)
  1384      copy(r.Clob, self.Clob)
  1385      return r
  1386  }
  1387  
  1388  func (self *IrAMD64_CALL_mem) String() string {
  1389      return fmt.Sprintf(
  1390          "mcall *%s, {%s}, {%s}",
  1391          self.Fn,
  1392          regslicerepr(self.In),
  1393          regslicerepr(self.Out),
  1394      )
  1395  }
  1396  
  1397  func (self *IrAMD64_CALL_mem) Usages() []*Reg {
  1398      if self.Fn.I == Rz {
  1399          return append(regsliceref(self.In), &self.Fn.M)
  1400      } else {
  1401          return append(regsliceref(self.In), &self.Fn.M, &self.Fn.I)
  1402      }
  1403  }
  1404  
  1405  func (self *IrAMD64_CALL_mem) Definitions() []*Reg {
  1406      return append(regsliceref(self.Out), regsliceref(self.Clob)...)
  1407  }
  1408  
  1409  type IrAMD64_CALL_gcwb struct {
  1410      R  Reg
  1411      M  Reg
  1412      Fn unsafe.Pointer
  1413  }
  1414  
  1415  func (self *IrAMD64_CALL_gcwb) Clone() IrNode {
  1416      r := *self
  1417      return &r
  1418  }
  1419  
  1420  func (self *IrAMD64_CALL_gcwb) String() string {
  1421      return fmt.Sprintf("scall *%p [%s], %s -> (%s)", self.Fn, rt.FuncName(self.Fn), self.R, self.M)
  1422  }
  1423  
  1424  func (self *IrAMD64_CALL_gcwb) Usages() []*Reg {
  1425      return []*Reg { &self.R, &self.M }
  1426  }
  1427  
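        // IrAMD64_RET terminates the function; R lists the registers that carry the
        // return values.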
  1428  type IrAMD64_RET struct {
  1429      R []Reg
  1430  }
  1431  
  1432  func IrArchReturn(rr []Reg) IrTerminator {
  1433      return &IrAMD64_RET { rr }
  1434  }
  1435  
  1436  func IrTryIntoArchReturn(p IrNode) ([]Reg, bool) {
  1437      if r, ok := p.(*IrAMD64_RET); ok {
  1438          return r.R, true
  1439      } else {
  1440          return nil, false
  1441      }
  1442  }
  1443  
  1444  func (self *IrAMD64_RET) Clone() IrNode {
  1445      r := new(IrAMD64_RET)
  1446      r.R = make([]Reg, len(self.R))
  1447      copy(r.R, self.R)
  1448      return r
  1449  }
  1450  
  1451  func (self *IrAMD64_RET) String() string {
  1452      return "retq"
  1453  }
  1454  
  1455  func (self *IrAMD64_RET) Usages() []*Reg {
  1456      return regsliceref(self.R)
  1457  }
  1458  
  1459  func (self *IrAMD64_RET) Successors() IrSuccessors {
  1460      return _EmptySuccessor{}
  1461  }
  1462  
  1463  type IrAMD64_JMP struct {
  1464      To *IrBranch
  1465  }
  1466  
  1467  func IrArchJump(to *BasicBlock) IrTerminator {
  1468      return &IrAMD64_JMP { IrLikely(to) }
  1469  }
  1470  
  1471  func (self *IrAMD64_JMP) Clone() IrNode {
  1472      return &IrAMD64_JMP { self.To.Clone() }
  1473  }
  1474  
  1475  func (self *IrAMD64_JMP) String() string {
  1476      return "jmp " + self.To.String()
  1477  }
  1478  
  1479  func (self *IrAMD64_JMP) Successors() IrSuccessors {
  1480      return &_SwitchSuccessors {
  1481          i: -1,
  1482          t: []_SwitchTarget {{ b: self.To }},
  1483      }
  1484  }
  1485  
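        // IrAMD64_JNC branches to To when the carry flag is clear and falls through to Ln
        // otherwise.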
  1486  type IrAMD64_JNC struct {
  1487      To *IrBranch
  1488      Ln *IrBranch
  1489  }
  1490  
  1491  func (self *IrAMD64_JNC) Clone() IrNode {
  1492      return &IrAMD64_JNC {
  1493          To: self.To.Clone(),
  1494          Ln: self.Ln.Clone(),
  1495      }
  1496  }
  1497  
  1498  func (self *IrAMD64_JNC) String() string {
  1499      return fmt.Sprintf("jnc %s; jmp %s", self.To, self.Ln)
  1500  }
  1501  
  1502  func (self *IrAMD64_JNC) Successors() IrSuccessors {
  1503      return &_SwitchSuccessors {
  1504          i: -1,
  1505          t: []_SwitchTarget {
  1506              { b: self.Ln, i: 0 },
  1507              { b: self.To },
  1508          },
  1509      }
  1510  }
  1511  
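        // The IrAMD64_Jcc_* terminators fuse a comparison with a conditional branch:
        // compare X with Y, jump to To when Op holds, and fall through to Ln otherwise;
        // the operand-kind suffixes match those of the CMPQ nodes.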
  1512  type IrAMD64_Jcc_rr struct {
  1513      X  Reg
  1514      Y  Reg
  1515      To *IrBranch
  1516      Ln *IrBranch
  1517      Op IrAMD64_CmpOp
  1518  }
  1519  
  1520  func (self *IrAMD64_Jcc_rr) Clone() IrNode {
  1521      r := *self
  1522      r.To = self.To.Clone()
  1523      r.Ln = self.Ln.Clone()
  1524      return &r
  1525  }
  1526  
  1527  func (self *IrAMD64_Jcc_rr) String() string {
  1528      return fmt.Sprintf(
  1529          "cmpq %s, %s; j%s %s; jmp %s",
  1530          self.X,
  1531          self.Y,
  1532          self.Op,
  1533          self.To,
  1534          self.Ln,
  1535      )
  1536  }
  1537  
  1538  func (self *IrAMD64_Jcc_rr) Usages() []*Reg {
  1539      return []*Reg { &self.X, &self.Y }
  1540  }
  1541  
  1542  func (self *IrAMD64_Jcc_rr) Successors() IrSuccessors {
  1543      return &_SwitchSuccessors {
  1544          i: -1,
  1545          t: []_SwitchTarget {
  1546              { b: self.Ln, i: 0 },
  1547              { b: self.To },
  1548          },
  1549      }
  1550  }
  1551  
  1552  type IrAMD64_Jcc_ri struct {
  1553      X  Reg
  1554      Y  int32
  1555      To *IrBranch
  1556      Ln *IrBranch
  1557      Op IrAMD64_CmpOp
  1558  }
  1559  
  1560  func (self *IrAMD64_Jcc_ri) Clone() IrNode {
  1561      r := *self
  1562      r.To = self.To.Clone()
  1563      r.Ln = self.Ln.Clone()
  1564      return &r
  1565  }
  1566  
  1567  func (self *IrAMD64_Jcc_ri) String() string {
  1568      return fmt.Sprintf(
  1569          "cmpq %s, $%d; j%s %s; jmp %s  # %#x",
  1570          self.X,
  1571          self.Y,
  1572          self.Op,
  1573          self.To,
  1574          self.Ln,
  1575          self.Y,
  1576      )
  1577  }
  1578  
  1579  func (self *IrAMD64_Jcc_ri) Usages() []*Reg {
  1580      return []*Reg { &self.X }
  1581  }
  1582  
  1583  func (self *IrAMD64_Jcc_ri) Successors() IrSuccessors {
  1584      return &_SwitchSuccessors {
  1585          i: -1,
  1586          t: []_SwitchTarget {
  1587              { b: self.Ln, i: 0 },
  1588              { b: self.To },
  1589          },
  1590      }
  1591  }
  1592  
  1593  type IrAMD64_Jcc_rp struct {
  1594      X  Reg
  1595      Y  unsafe.Pointer
  1596      To *IrBranch
  1597      Ln *IrBranch
  1598      Op IrAMD64_CmpOp
  1599  }
  1600  
  1601  func (self *IrAMD64_Jcc_rp) Clone() IrNode {
  1602      r := *self
  1603      r.To = self.To.Clone()
  1604      r.Ln = self.Ln.Clone()
  1605      return &r
  1606  }
  1607  
  1608  func (self *IrAMD64_Jcc_rp) String() string {
  1609      return fmt.Sprintf(
  1610          "cmpq %s, $%p; j%s %s; jmp %s",
  1611          self.X,
  1612          self.Y,
  1613          self.Op,
  1614          self.To,
  1615          self.Ln,
  1616      )
  1617  }
  1618  
  1619  func (self *IrAMD64_Jcc_rp) Usages() []*Reg {
  1620      return []*Reg { &self.X }
  1621  }
  1622  
  1623  func (self *IrAMD64_Jcc_rp) Successors() IrSuccessors {
  1624      return &_SwitchSuccessors {
  1625          i: -1,
  1626          t: []_SwitchTarget {
  1627              { b: self.Ln, i: 0 },
  1628              { b: self.To },
  1629          },
  1630      }
  1631  }
  1632  
  1633  type IrAMD64_Jcc_ir struct {
  1634      X  int32
  1635      Y  Reg
  1636      To *IrBranch
  1637      Ln *IrBranch
  1638      Op IrAMD64_CmpOp
  1639  }
  1640  
  1641  func (self *IrAMD64_Jcc_ir) Clone() IrNode {
  1642      r := *self
  1643      r.To = self.To.Clone()
  1644      r.Ln = self.Ln.Clone()
  1645      return &r
  1646  }
  1647  
  1648  func (self *IrAMD64_Jcc_ir) String() string {
  1649      return fmt.Sprintf(
  1650          "cmpq $%d, %s; j%s %s; jmp %s  # %#x",
  1651          self.X,
  1652          self.Y,
  1653          self.Op,
  1654          self.To,
  1655          self.Ln,
  1656          self.X,
  1657      )
  1658  }
  1659  
  1660  func (self *IrAMD64_Jcc_ir) Usages() []*Reg {
  1661      return []*Reg { &self.Y }
  1662  }
  1663  
  1664  func (self *IrAMD64_Jcc_ir) Successors() IrSuccessors {
  1665      return &_SwitchSuccessors {
  1666          i: -1,
  1667          t: []_SwitchTarget {
  1668              { b: self.Ln, i: 0 },
  1669              { b: self.To },
  1670          },
  1671      }
  1672  }
  1673  
  1674  type IrAMD64_Jcc_pr struct {
  1675      X  unsafe.Pointer
  1676      Y  Reg
  1677      To *IrBranch
  1678      Ln *IrBranch
  1679      Op IrAMD64_CmpOp
  1680  }
  1681  
  1682  func (self *IrAMD64_Jcc_pr) Clone() IrNode {
  1683      r := *self
  1684      r.To = self.To.Clone()
  1685      r.Ln = self.Ln.Clone()
  1686      return &r
  1687  }
  1688  
  1689  func (self *IrAMD64_Jcc_pr) String() string {
  1690      return fmt.Sprintf(
  1691          "cmpq $%p, %s; j%s %s; jmp %s",
  1692          self.X,
  1693          self.Y,
  1694          self.Op,
  1695          self.To,
  1696          self.Ln,
  1697      )
  1698  }
  1699  
  1700  func (self *IrAMD64_Jcc_pr) Usages() []*Reg {
  1701      return []*Reg { &self.Y }
  1702  }
  1703  
  1704  func (self *IrAMD64_Jcc_pr) Successors() IrSuccessors {
  1705      return &_SwitchSuccessors {
  1706          i: -1,
  1707          t: []_SwitchTarget {
  1708              { b: self.Ln, i: 0 },
  1709              { b: self.To },
  1710          },
  1711      }
  1712  }
  1713  
  1714  type IrAMD64_Jcc_rm struct {
  1715      X  Reg
  1716      Y  Mem
  1717      N  uint8
  1718      To *IrBranch
  1719      Ln *IrBranch
  1720      Op IrAMD64_CmpOp
  1721  }
  1722  
  1723  func (self *IrAMD64_Jcc_rm) MemOp() *Mem {
  1724      return &self.Y
  1725  }
  1726  
  1727  func (self *IrAMD64_Jcc_rm) Clone() IrNode {
  1728      r := *self
  1729      r.To = self.To.Clone()
  1730      r.Ln = self.Ln.Clone()
  1731      return &r
  1732  }
  1733  
  1734  func (self *IrAMD64_Jcc_rm) String() string {
  1735      return fmt.Sprintf(
  1736          "cmp%c %s, %s; j%s %s; jmp %s",
  1737          memsizec(self.N),
  1738          self.X,
  1739          self.Y,
  1740          self.Op,
  1741          self.To,
  1742          self.Ln,
  1743      )
  1744  }
  1745  
  1746  func (self *IrAMD64_Jcc_rm) Usages() []*Reg {
  1747      if self.Y.I == Rz {
  1748          return []*Reg { &self.X, &self.Y.M }
  1749      } else {
  1750          return []*Reg { &self.X, &self.Y.M, &self.Y.I }
  1751      }
  1752  }
  1753  
  1754  func (self *IrAMD64_Jcc_rm) Successors() IrSuccessors {
  1755      return &_SwitchSuccessors {
  1756          i: -1,
  1757          t: []_SwitchTarget {
  1758              { b: self.Ln, i: 0 },
  1759              { b: self.To },
  1760          },
  1761      }
  1762  }
  1763  
  1764  type IrAMD64_Jcc_mr struct {
  1765      X  Mem
  1766      Y  Reg
  1767      N  uint8
  1768      To *IrBranch
  1769      Ln *IrBranch
  1770      Op IrAMD64_CmpOp
  1771  }
  1772  
  1773  func (self *IrAMD64_Jcc_mr) MemOp() *Mem {
  1774      return &self.X
  1775  }
  1776  
  1777  func (self *IrAMD64_Jcc_mr) Clone() IrNode {
  1778      r := *self
  1779      r.To = self.To.Clone()
  1780      r.Ln = self.Ln.Clone()
  1781      return &r
  1782  }
  1783  
  1784  func (self *IrAMD64_Jcc_mr) String() string {
  1785      return fmt.Sprintf(
  1786          "cmp%c %s, %s; j%s %s; jmp %s",
  1787          memsizec(self.N),
  1788          self.X,
  1789          self.Y,
  1790          self.Op,
  1791          self.To,
  1792          self.Ln,
  1793      )
  1794  }
  1795  
  1796  func (self *IrAMD64_Jcc_mr) Usages() []*Reg {
  1797      if self.X.I == Rz {
  1798          return []*Reg { &self.X.M, &self.Y }
  1799      } else {
  1800          return []*Reg { &self.X.M, &self.X.I, &self.Y }
  1801      }
  1802  }
  1803  
  1804  func (self *IrAMD64_Jcc_mr) Successors() IrSuccessors {
  1805      return &_SwitchSuccessors {
  1806          i: -1,
  1807          t: []_SwitchTarget {
  1808              { b: self.Ln, i: 0 },
  1809              { b: self.To },
  1810          },
  1811      }
  1812  }
  1813  
  1814  type IrAMD64_Jcc_mi struct {
  1815      X  Mem
  1816      Y  int32
  1817      N  uint8
  1818      To *IrBranch
  1819      Ln *IrBranch
  1820      Op IrAMD64_CmpOp
  1821  }
  1822  
  1823  func (self *IrAMD64_Jcc_mi) MemOp() *Mem {
  1824      return &self.X
  1825  }
  1826  
  1827  func (self *IrAMD64_Jcc_mi) Clone() IrNode {
  1828      r := *self
  1829      r.To = self.To.Clone()
  1830      r.Ln = self.Ln.Clone()
  1831      return &r
  1832  }
  1833  
  1834  func (self *IrAMD64_Jcc_mi) String() string {
  1835      return fmt.Sprintf(
  1836          "cmp%c %s, $%d; j%s %s; jmp %s  # %#x",
  1837          memsizec(self.N),
  1838          self.X,
  1839          self.Y,
  1840          self.Op,
  1841          self.To,
  1842          self.Ln,
  1843          self.Y,
  1844      )
  1845  }
  1846  
  1847  func (self *IrAMD64_Jcc_mi) Usages() []*Reg {
  1848      if self.X.I == Rz {
  1849          return []*Reg { &self.X.M }
  1850      } else {
  1851          return []*Reg { &self.X.M, &self.X.I }
  1852      }
  1853  }
  1854  
  1855  func (self *IrAMD64_Jcc_mi) Successors() IrSuccessors {
  1856      return &_SwitchSuccessors {
  1857          i: -1,
  1858          t: []_SwitchTarget {
  1859              { b: self.Ln, i: 0 },
  1860              { b: self.To },
  1861          },
  1862      }
  1863  }
  1864  
  1865  type IrAMD64_Jcc_mp struct {
  1866      X  Mem
  1867      Y  unsafe.Pointer
  1868      To *IrBranch
  1869      Ln *IrBranch
  1870      Op IrAMD64_CmpOp
  1871  }
  1872  
  1873  func (self *IrAMD64_Jcc_mp) MemOp() *Mem {
  1874      return &self.X
  1875  }
  1876  
  1877  func (self *IrAMD64_Jcc_mp) Clone() IrNode {
  1878      r := *self
  1879      r.To = self.To.Clone()
  1880      r.Ln = self.Ln.Clone()
  1881      return &r
  1882  }
  1883  
  1884  func (self *IrAMD64_Jcc_mp) String() string {
  1885      return fmt.Sprintf(
  1886          "cmpq %s, $%p; j%s %s; jmp %s  # %#x",
  1887          self.X,
  1888          self.Y,
  1889          self.Op,
  1890          self.To,
  1891          self.Ln,
  1892          self.Y,
  1893      )
  1894  }
  1895  
  1896  func (self *IrAMD64_Jcc_mp) Usages() []*Reg {
  1897      if self.X.I == Rz {
  1898          return []*Reg { &self.X.M }
  1899      } else {
  1900          return []*Reg { &self.X.M, &self.X.I }
  1901      }
  1902  }
  1903  
  1904  func (self *IrAMD64_Jcc_mp) Successors() IrSuccessors {
  1905      return &_SwitchSuccessors {
  1906          i: -1,
  1907          t: []_SwitchTarget {
  1908              { b: self.Ln, i: 0 },
  1909              { b: self.To },
  1910          },
  1911      }
  1912  }
  1913  
  1914  type IrAMD64_Jcc_im struct {
  1915      X  int32
  1916      Y  Mem
  1917      N  uint8
  1918      To *IrBranch
  1919      Ln *IrBranch
  1920      Op IrAMD64_CmpOp
  1921  }
  1922  
  1923  func (self *IrAMD64_Jcc_im) MemOp() *Mem {
  1924      return &self.Y
  1925  }
  1926  
  1927  func (self *IrAMD64_Jcc_im) Clone() IrNode {
  1928      r := *self
  1929      r.To = self.To.Clone()
  1930      r.Ln = self.Ln.Clone()
  1931      return &r
  1932  }
  1933  
  1934  func (self *IrAMD64_Jcc_im) String() string {
  1935      return fmt.Sprintf(
  1936          "cmp%c $%d, %s; j%s %s; jmp %s  # %#x",
  1937          memsizec(self.N),
  1938          self.X,
  1939          self.Y,
  1940          self.Op,
  1941          self.To,
  1942          self.Ln,
  1943          self.X,
  1944      )
  1945  }
  1946  
  1947  func (self *IrAMD64_Jcc_im) Usages() []*Reg {
  1948      if self.Y.I == Rz {
  1949          return []*Reg { &self.Y.M }
  1950      } else {
  1951          return []*Reg { &self.Y.M, &self.Y.I }
  1952      }
  1953  }
  1954  
  1955  func (self *IrAMD64_Jcc_im) Successors() IrSuccessors {
  1956      return &_SwitchSuccessors {
  1957          i: -1,
  1958          t: []_SwitchTarget {
  1959              { b: self.Ln, i: 0 },
  1960              { b: self.To },
  1961          },
  1962      }
  1963  }
  1964  
  1965  type IrAMD64_Jcc_pm struct {
  1966      X  unsafe.Pointer
  1967      Y  Mem
  1968      To *IrBranch
  1969      Ln *IrBranch
  1970      Op IrAMD64_CmpOp
  1971  }
  1972  
  1973  func (self *IrAMD64_Jcc_pm) MemOp() *Mem {
  1974      return &self.Y
  1975  }
  1976  
  1977  func (self *IrAMD64_Jcc_pm) Clone() IrNode {
  1978      r := *self
  1979      r.To = self.To.Clone()
  1980      r.Ln = self.Ln.Clone()
  1981      return &r
  1982  }
  1983  
  1984  func (self *IrAMD64_Jcc_pm) String() string {
  1985      return fmt.Sprintf(
  1986          "cmpq $%p, %s; j%s %s; jmp %s",
  1987          self.X,
  1988          self.Y,
  1989          self.Op,
  1990          self.To,
  1991          self.Ln,
  1992      )
  1993  }
  1994  
  1995  func (self *IrAMD64_Jcc_pm) Usages() []*Reg {
  1996      if self.Y.I == Rz {
  1997          return []*Reg { &self.Y.M }
  1998      } else {
  1999          return []*Reg { &self.Y.M, &self.Y.I }
  2000      }
  2001  }
  2002  
  2003  func (self *IrAMD64_Jcc_pm) Successors() IrSuccessors {
  2004      return &_SwitchSuccessors {
  2005          i: -1,
  2006          t: []_SwitchTarget {
  2007              { b: self.Ln, i: 0 },
  2008              { b: self.To },
  2009          },
  2010      }
  2011  }