github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/wasm/ssa.go

// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package wasm

import (
	"github.com/gagliardetto/golang-go/cmd/compile/internal/gc"
	"github.com/gagliardetto/golang-go/cmd/compile/internal/logopt"
	"github.com/gagliardetto/golang-go/cmd/compile/internal/ssa"
	"github.com/gagliardetto/golang-go/cmd/compile/internal/types"
	"github.com/gagliardetto/golang-go/cmd/internal/obj"
	"github.com/gagliardetto/golang-go/cmd/internal/obj/wasm"
	"github.com/gagliardetto/golang-go/cmd/internal/objabi"
)

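// Init initializes arch with the wasm link architecture, stack pointer
// register, and the SSA code generation hooks defined below.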
func Init(arch *gc.Arch) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.Ginsnop = ginsnop
	arch.Ginsnopdefer = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}

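// zeroRange zeroes cnt bytes of stack memory starting at offset off from SP,
// emitting one i64.store of the constant 0 per 8-byte word.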
func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	for i := int64(0); i < cnt; i += 8 {
		p = pp.Appendpp(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}

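// ginsnop emits a wasm Nop instruction.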
func ginsnop(pp *gc.Progs) *obj.Prog {
	return pp.Prog(wasm.ANop)
}

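// ssaMarkMoves has nothing to do on wasm.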
func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
}

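// ssaGenBlock emits the control-flow code that ends block b, branching to its
// successors as needed. next is the block laid out immediately after b, so a
// fall through to next needs no explicit jump.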
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	case ssa.BlockIf:
		switch next {
		case b.Succs[0].Block():
			// if false, jump to b.Succs[1]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AI32Eqz)
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[1].Block())
			s.Prog(wasm.AEnd)
		case b.Succs[1].Block():
			// if true, jump to b.Succs[0]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
		default:
			// if true, jump to b.Succs[0], else jump to b.Succs[1]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
			s.Br(obj.AJMP, b.Succs[1].Block())
		}

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)

	case ssa.BlockExit:

	case ssa.BlockDefer:
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		s.Br(obj.AJMP, b.Succs[1].Block())
		s.Prog(wasm.AEnd)
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	default:
		panic("unexpected block")
	}

	// Entry point for the next block. Used by the JMP in goToBlock.
	s.Prog(wasm.ARESUMEPOINT)

	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}

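// ssaGenValue emits code for value v. Calls, stores, and a few other ops are
// handled here directly; other values are generated onto the wasm operand stack
// by ssaGenValueOnStack and stored to their assigned register, unless they are
// marked OnWasmStack, in which case generation is deferred until their use.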
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
		s.PrepareCall(v)
		if v.Aux == gc.Deferreturn {
			// add a resume point before call to deferreturn so it can be called again via jmpdefer
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if sym, ok := v.Aux.(*obj.LSym); ok {
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
		} else {
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
		}

	case ssa.OpWasmLoweredMove:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}

	case ssa.OpWasmLoweredZero:
		getValue32(s, v.Args[0])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}

	case ssa.OpWasmLoweredNilCheck:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
		s.Prog(wasm.AEnd)
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(wasm.ACALLNORESUME) // TODO(neelance): If possible, turn this into a simple wasm.ACall.
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		p := s.Prog(storeOp(v.Type))
		gc.AddrAuto(&p.To, v)

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			s.OnWasmStackSkipped++
			// If a Value is marked OnWasmStack, we don't generate the value and store it to a register now.
			// Instead, we delay the generation to when the value is used and then directly generate it on the WebAssembly stack.
			return
		}
		ssaGenValueOnStack(s, v, true)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}

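// ssaGenValueOnStack generates v directly onto the wasm operand stack rather
// than storing it in a register. If extend is true, i32 comparison results are
// zero-extended to i64 to match the 64-bit representation used for registers.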
func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// Caller PC is stored 8 bytes below first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// Caller SP is the address of the first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			gc.AddAux(&p.From, v)
		case *gc.Node:
			p.From.Reg = v.Args[0].Reg()
			gc.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue32(s, v.Args[2])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF32Const:
		f32Const(s, v.AuxFloat())

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmI64Rotl,
		ssa.OpWasmF32Add, ssa.OpWasmF32Sub, ssa.OpWasmF32Mul, ssa.OpWasmF32Div, ssa.OpWasmF32Copysign,
		ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI32Rotl:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		s.Prog(wasm.AI32Rotl)
		s.Prog(wasm.AI64ExtendI32U)

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSatF32S, ssa.OpWasmI64TruncSatF64S:
		getValue64(s, v.Args[0])
		if objabi.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			if v.Op == ssa.OpWasmI64TruncSatF32S {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}
		}

	case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
		getValue64(s, v.Args[0])
		if objabi.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			if v.Op == ssa.OpWasmI64TruncSatF32U {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}
		}

	case ssa.OpWasmF32DemoteF64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF64PromoteF32:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF32ConvertI64S, ssa.OpWasmF32ConvertI64U,
		ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
		ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
		ssa.OpWasmF32Neg, ssa.OpWasmF32Sqrt, ssa.OpWasmF32Trunc, ssa.OpWasmF32Ceil, ssa.OpWasmF32Floor, ssa.OpWasmF32Nearest, ssa.OpWasmF32Abs,
		ssa.OpWasmF64Neg, ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs,
		ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		p := s.Prog(loadOp(v.Type))
		gc.AddrAuto(&p.From, v.Args[0])

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)

	}
}

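// isCmp reports whether v is a comparison, which leaves an i32 boolean on the
// wasm stack instead of an i64.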
func isCmp(v *ssa.Value) bool {
	switch v.Op {
	case ssa.OpWasmI64Eqz, ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		return true
	default:
		return false
	}
}

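// getValue32 pushes v onto the wasm stack as an i32: values marked OnWasmStack
// are generated in place, register values are fetched with Get and wrapped from
// i64 to i32 (SP is already 32-bit).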
func getValue32(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, false)
		if !isCmp(v) {
			s.Prog(wasm.AI32WrapI64)
		}
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		s.Prog(wasm.AI32WrapI64)
	}
}

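// getValue64 pushes v onto the wasm stack as an i64, zero-extending the 32-bit
// SP register when necessary.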
func getValue64(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, true)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendI32U)
	}
}

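// i32Const, i64Const, f32Const, and f64Const push the given constant onto the wasm stack.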
func i32Const(s *gc.SSAGenState, val int32) {
	p := s.Prog(wasm.AI32Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
}

func i64Const(s *gc.SSAGenState, val int64) {
	p := s.Prog(wasm.AI64Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
}

func f32Const(s *gc.SSAGenState, val float64) {
	p := s.Prog(wasm.AF32Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

func f64Const(s *gc.SSAGenState, val float64) {
	p := s.Prog(wasm.AF64Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

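// getReg pushes the current value of reg onto the wasm stack.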
func getReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.AGet)
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

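// setReg pops the top of the wasm stack into reg.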
func setReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.ASet)
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

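// loadOp returns the wasm load instruction for a value of type t, chosen by
// size and signedness.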
func loadOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Load
		case 8:
			return wasm.AF64Load
		default:
			panic("bad load type")
		}
	}

	switch t.Size() {
	case 1:
		if t.IsSigned() {
			return wasm.AI64Load8S
		}
		return wasm.AI64Load8U
	case 2:
		if t.IsSigned() {
			return wasm.AI64Load16S
		}
		return wasm.AI64Load16U
	case 4:
		if t.IsSigned() {
			return wasm.AI64Load32S
		}
		return wasm.AI64Load32U
	case 8:
		return wasm.AI64Load
	default:
		panic("bad load type")
	}
}

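// storeOp returns the wasm store instruction for a value of type t, chosen by size.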
func storeOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Store
		case 8:
			return wasm.AF64Store
		default:
			panic("bad store type")
		}
	}

	switch t.Size() {
	case 1:
		return wasm.AI64Store8
	case 2:
		return wasm.AI64Store16
	case 4:
		return wasm.AI64Store32
	case 8:
		return wasm.AI64Store
	default:
		panic("bad store type")
	}
}