github.com/cloudwego/frugal@v0.1.15/internal/atm/emu/emu.go

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package emu

import (
    `fmt`
    `math/bits`
    `runtime`
    `sync`
    `unsafe`

    `github.com/cloudwego/frugal/internal/atm/hir`
)

// Value is a single argument or return-value slot: it can carry either an
// integer (U) or a pointer (P).
type Value struct {
    U uint64
    P unsafe.Pointer
}

// Emulator interprets a hir.Program one instruction at a time. uv holds the
// generic (integer) registers and pv the pointer registers; uv[hir.Rz] and
// pv[hir.Pn] are cleared before every instruction so they always read as zero
// and nil. ar and rv are the call argument and return-value slots.
type Emulator struct {
    pc *hir.Ir
    uv [6]uint64
    pv [7]unsafe.Pointer
    ar [8]Value
    rv [8]Value
}

var (
    emulatorPool sync.Pool
)

// LoadProgram returns an Emulator positioned at the head of p, reusing a
// pooled instance when one is available and allocating a fresh one otherwise.
func LoadProgram(p hir.Program) (e *Emulator) {
    if v := emulatorPool.Get(); v == nil {
        return &Emulator{pc: p.Head}
    } else {
        return v.(*Emulator).Reset(p)
    }
}
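
// runOnce is a usage sketch of the life cycle above (a hypothetical helper,
// not something the rest of the package relies on): load a program, fill an
// argument slot, interpret it, read a return slot, and recycle the emulator.
// The hir.Program is assumed to be built elsewhere by the surrounding packages.
func runOnce(p hir.Program, a0 uint64) uint64 {
    e := LoadProgram(p) // pooled or freshly allocated emulator
    e.Au(0, a0)         // argument slot #0 (integer)
    e.Run()             // interpret until OP_ret or the end of the program
    r := e.Ru(0)        // return-value slot #0 (integer)
    e.Free()            // hand the emulator back to the pool
    return r
}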

// bool2u64 converts a bool into 0 or 1.
func bool2u64(val bool) uint64 {
    if val {
        return 1
    } else {
        return 0
    }
}

// checkptr performs a one-byte read through p (when p is non-nil) before
// returning it, so an invalid pointer faults right where it is produced
// instead of at some later use.
func checkptr(p unsafe.Pointer) unsafe.Pointer {
    if p != nil { _ = *(*uint8)(p) }
    return p
}

// trap dumps the current emulator state and stops in the debugger; it backs
// the OP_break instruction.
func (self *Emulator) trap() {
    println("****** DEBUGGER BREAK ******")
    println("Current State:", self.String())
    runtime.Breakpoint()
}

// Ru and Rp read return-value slot i as an integer or a pointer.
func (self *Emulator) Ru(i int) uint64         { return self.rv[i].U }
func (self *Emulator) Rp(i int) unsafe.Pointer { return self.rv[i].P }

// Au and Ap fill argument slot i and return the emulator so calls can be chained.
func (self *Emulator) Au(i int, v uint64)         *Emulator { self.ar[i].U = v; return self }
func (self *Emulator) Ap(i int, v unsafe.Pointer) *Emulator { self.ar[i].P = v; return self }

func (self *Emulator) Run() {
    var i uint8
    var v uint64
    var p *hir.Ir
    var q *hir.Ir

    /* run until end */
    for self.pc != nil {
        p, self.pc = self.pc, self.pc.Ln
        self.uv[hir.Rz], self.pv[hir.Pn] = 0, nil

        /* main switch on OpCode */
        switch p.Op {
            case hir.OP_nop   : break
            case hir.OP_ip    : self.pv[p.Pd] = checkptr(p.Pr)
            case hir.OP_lb    : self.uv[p.Rx] = uint64(*(*uint8)(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv))))
            case hir.OP_lw    : self.uv[p.Rx] = uint64(*(*uint16)(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv))))
            case hir.OP_ll    : self.uv[p.Rx] = uint64(*(*uint32)(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv))))
            case hir.OP_lq    : self.uv[p.Rx] = *(*uint64)(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv)))
            case hir.OP_lp    : self.pv[p.Pd] = checkptr(*(*unsafe.Pointer)(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv))))
            case hir.OP_sb    : *(*uint8)(unsafe.Pointer(uintptr(self.pv[p.Pd]) + uintptr(p.Iv))) = uint8(self.uv[p.Rx])
            case hir.OP_sw    : *(*uint16)(unsafe.Pointer(uintptr(self.pv[p.Pd]) + uintptr(p.Iv))) = uint16(self.uv[p.Rx])
            case hir.OP_sl    : *(*uint32)(unsafe.Pointer(uintptr(self.pv[p.Pd]) + uintptr(p.Iv))) = uint32(self.uv[p.Rx])
            case hir.OP_sq    : *(*uint64)(unsafe.Pointer(uintptr(self.pv[p.Pd]) + uintptr(p.Iv))) = self.uv[p.Rx]
            case hir.OP_sp    : *(*unsafe.Pointer)(unsafe.Pointer(uintptr(self.pv[p.Pd]) + uintptr(p.Iv))) = self.pv[p.Ps]
            case hir.OP_ldaq  : self.uv[p.Rx] = self.ar[p.Iv].U
            case hir.OP_ldap  : self.pv[p.Pd] = checkptr(self.ar[p.Iv].P)
            case hir.OP_addp  : self.pv[p.Pd] = checkptr(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(self.uv[p.Rx])))
            case hir.OP_subp  : self.pv[p.Pd] = checkptr(unsafe.Pointer(uintptr(self.pv[p.Ps]) - uintptr(self.uv[p.Rx])))
            case hir.OP_addpi : self.pv[p.Pd] = checkptr(unsafe.Pointer(uintptr(self.pv[p.Ps]) + uintptr(p.Iv)))
            case hir.OP_add   : self.uv[p.Rz] = self.uv[p.Rx] + self.uv[p.Ry]
            case hir.OP_sub   : self.uv[p.Rz] = self.uv[p.Rx] - self.uv[p.Ry]
            case hir.OP_addi  : self.uv[p.Ry] = self.uv[p.Rx] + uint64(p.Iv)
            case hir.OP_muli  : self.uv[p.Ry] = self.uv[p.Rx] * uint64(p.Iv)
            case hir.OP_andi  : self.uv[p.Ry] = self.uv[p.Rx] & uint64(p.Iv)
            case hir.OP_xori  : self.uv[p.Ry] = self.uv[p.Rx] ^ uint64(p.Iv)
            case hir.OP_shri  : self.uv[p.Ry] = self.uv[p.Rx] >> p.Iv
            case hir.OP_bsi   : self.uv[p.Ry] = self.uv[p.Rx] | (1 << p.Iv)
            case hir.OP_swapw : self.uv[p.Ry] = uint64(bits.ReverseBytes16(uint16(self.uv[p.Rx])))
            case hir.OP_swapl : self.uv[p.Ry] = uint64(bits.ReverseBytes32(uint32(self.uv[p.Rx])))
            case hir.OP_swapq : self.uv[p.Ry] = bits.ReverseBytes64(self.uv[p.Rx])
            case hir.OP_sxlq  : self.uv[p.Ry] = uint64(int32(self.uv[p.Rx]))
            case hir.OP_beq   : if       self.uv[p.Rx]  ==       self.uv[p.Ry]  { self.pc = p.Br }
            case hir.OP_bne   : if       self.uv[p.Rx]  !=       self.uv[p.Ry]  { self.pc = p.Br }
            case hir.OP_blt   : if int64(self.uv[p.Rx]) <  int64(self.uv[p.Ry]) { self.pc = p.Br }
            case hir.OP_bltu  : if       self.uv[p.Rx]  <        self.uv[p.Ry]  { self.pc = p.Br }
            case hir.OP_bgeu  : if       self.uv[p.Rx]  >=       self.uv[p.Ry]  { self.pc = p.Br }
            case hir.OP_beqp  : if       self.pv[p.Ps]  ==       self.pv[p.Pd]  { self.pc = p.Br }
            case hir.OP_bnep  : if       self.pv[p.Ps]  !=       self.pv[p.Pd]  { self.pc = p.Br }
            case hir.OP_jmp   : self.pc = p.Br
            case hir.OP_bzero : memclrNoHeapPointers(self.pv[p.Pd], uintptr(p.Iv))
            case hir.OP_bcopy : memmove(self.pv[p.Pd], self.pv[p.Ps], uintptr(self.uv[p.Rx]))
            case hir.OP_break : self.trap()

            /* call to C / Go / Go interface functions */
            case hir.OP_ccall: fallthrough
            case hir.OP_gcall: fallthrough
            case hir.OP_icall: hir.LookupCall(p.Iv).Call(self, p)

            /* bit test and set: Rz reports whether bit (Rx mod 64) of Ry was already set */
            case hir.OP_bts: {
                bi := self.uv[p.Rx]
                bv := self.uv[p.Ry]
                self.uv[p.Ry] = bv | (1 << (bi % 64))
                self.uv[p.Rz] = bool2u64(bv & (1 << (bi % 64)) != 0)
            }

            /* table switch: p.Pr points at a table of p.Iv branch targets,
             * out-of-range selectors and nil entries simply fall through */
            case hir.OP_bsw: {
                if v = self.uv[p.Rx]; v < uint64(p.Iv) {
                    if q = *(**hir.Ir)(unsafe.Pointer(uintptr(p.Pr) + uintptr(v) * 8)); q != nil {
                        self.pc = q
                    }
                }
            }

            /* return from function: every Rr entry is a register index (ArgMask)
             * plus an ArgPointer flag that selects the pointer register bank */
            case hir.OP_ret: {
                for i, self.pc = 0, nil; i < p.Rn; i++ {
                    if r := p.Rr[i]; r & hir.ArgPointer == 0 {
                        self.rv[i].U = self.uv[r & hir.ArgMask]
                    } else {
                        self.rv[i].P = self.pv[r & hir.ArgMask]
                    }
                }
            }

            /* illegal OpCode */
            default: {
                panic(fmt.Sprintf("illegal OpCode: %#02x", p.Op))
            }
        }
    }
}
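
// The helpers below are hypothetical, stand-alone renderings of the two least
// obvious cases in the switch above; they exist purely as illustrations of
// what OP_bts and OP_bsw compute.

// bitTestAndSet mirrors the OP_bts case: it returns the word with bit
// (bit mod 64) set, together with 1 if that bit was already set and 0
// otherwise (the value OP_bts stores into Rz).
func bitTestAndSet(word uint64, bit uint64) (uint64, uint64) {
    mask := uint64(1) << (bit % 64)
    return word | mask, bool2u64(word&mask != 0)
}

// jumpTableLookup mirrors the OP_bsw case: table is assumed to point at n
// consecutive 8-byte *hir.Ir entries (the same stride used by the case above),
// and selector v picks one of them; out-of-range selectors and nil entries
// return nil, which Run treats as "fall through".
func jumpTableLookup(table unsafe.Pointer, n uint64, v uint64) *hir.Ir {
    if v >= n {
        return nil
    }
    return *(**hir.Ir)(unsafe.Pointer(uintptr(table) + uintptr(v)*8))
}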

// Free returns the emulator to the pool so a later LoadProgram can reuse it.
func (self *Emulator) Free() {
    emulatorPool.Put(self)
}

// Reset clears all register, argument and return-value state and rebinds the
// emulator to the head of p.
func (self *Emulator) Reset(p hir.Program) *Emulator {
    *self = Emulator{pc: p.Head}
    return self
}
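
// runAll is a hypothetical sketch of why Free and Reset exist: one pooled
// Emulator can serve many programs in a row, because LoadProgram resets the
// whole register, argument and return state before every run. Each program is
// assumed to leave its result in return slot #0.
func runAll(progs []hir.Program) []uint64 {
    out := make([]uint64, 0, len(progs))
    for _, p := range progs {
        e := LoadProgram(p) // often the emulator freed on the previous iteration
        e.Run()
        out = append(out, e.Ru(0))
        e.Free()
    }
    return out
}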

/** Implementation of hir.CallState **/

func (self *Emulator) Gr(id hir.GenericRegister) uint64 {
    return self.uv[id]
}

func (self *Emulator) Pr(id hir.PointerRegister) unsafe.Pointer {
    return self.pv[id]
}

func (self *Emulator) SetGr(id hir.GenericRegister, val uint64) {
    self.uv[id] = val
}

func (self *Emulator) SetPr(id hir.PointerRegister, val unsafe.Pointer) {
    self.pv[id] = val
}
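
// callStateSketch is a hypothetical illustration of how a call handler that
// receives this Emulator through the interface above can read its arguments
// and publish results. Register ids are written as bare indexes here, which is
// an assumption based on how uv and pv are indexed; real handlers would use
// the register constants defined in the hir package.
func callStateSketch(e *Emulator) {
    n := e.Gr(hir.GenericRegister(0))    // read generic register #0
    p := e.Pr(hir.PointerRegister(0))    // read pointer register #0
    e.SetGr(hir.GenericRegister(1), n+1) // write an integer result
    e.SetPr(hir.PointerRegister(1), p)   // forward the pointer unchanged
}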

/** State Dumping **/

const _F_emulator = `Emulator {
    pc  (%p)%s
    r0  %#x
    r1  %#x
    r2  %#x
    r3  %#x
    r4  %#x
    r5  %#x
   ----
    p0  %p
    p1  %p
    p2  %p
    p3  %p
    p4  %p
    p5  %p
    p6  %p
}`

func (self *Emulator) String() string {
    return fmt.Sprintf(
        _F_emulator,
        self.pc,
        self.pc.Disassemble(nil),
        self.uv[0],
        self.uv[1],
        self.uv[2],
        self.uv[3],
        self.uv[4],
        self.uv[5],
        self.pv[0],
        self.pv[1],
        self.pv[2],
        self.pv[3],
        self.pv[4],
        self.pv[5],
        self.pv[6],
    )
}