github.com/hikaru7719/go@v0.0.0-20181025140707-c8b2ac68906a/src/cmd/compile/internal/wasm/ssa.go

// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package wasm

import (
	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/wasm"
)

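// Init registers the wasm architecture with the gc backend. It sets the
// link architecture, the stack pointer register, and the hooks that the
// SSA compiler calls to emit wasm code.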
func Init(arch *gc.Arch) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.ZeroAuto = zeroAuto
	arch.Ginsnop = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}

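// zeroRange zeroes cnt bytes of stack memory starting at off bytes above SP.
// It stores the constant 0 one 64-bit word at a time, so cnt must be a
// multiple of the pointer width.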
func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	for i := int64(0); i < cnt; i += 8 {
		p = pp.Appendpp(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}

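// zeroAuto zeroes the stack slot of the automatic variable n,
// one 64-bit word at a time.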
func zeroAuto(pp *gc.Progs, n *gc.Node) {
	sym := n.Sym.Linksym()
	size := n.Type.Size()
	for i := int64(0); i < size; i += 8 {
		p := pp.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_SP}

		p = pp.Prog(wasm.AI64Const)
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: 0}

		p = pp.Prog(wasm.AI64Store)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_AUTO, Offset: n.Xoffset + i, Sym: sym}
	}
}

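// ginsnop emits a wasm no-op instruction.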
func ginsnop(pp *gc.Progs) {
	pp.Prog(wasm.ANop)
}

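// ssaMarkMoves is intentionally empty: wasm has no flags register,
// so no moves need to be marked as flag-clobbering.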
func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
}

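// ssaGenBlock emits the control flow that ends block b. next is the block
// that will be generated immediately after b, so a jump to it can be
// omitted when b is allowed to fall through.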
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	goToBlock := func(block *ssa.Block, canFallthrough bool) {
		if canFallthrough && block == next {
			return
		}
		s.Br(obj.AJMP, block)
	}

	switch b.Kind {
	case ssa.BlockPlain:
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockIf:
		getValue32(s, b.Control)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)

	case ssa.BlockExit:
		s.Prog(obj.AUNDEF)

	case ssa.BlockDefer:
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	default:
		panic("unexpected block")
	}

	// Entry point for the next block. Used by the JMP in goToBlock.
	s.Prog(wasm.ARESUMEPOINT)

	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}

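// ssaGenValue emits the wasm instructions for the SSA value v. Calls, stores,
// and other side-effecting operations are handled directly; all other values
// are generated onto the WebAssembly operand stack and then stored into their
// assigned register, unless they are marked OnWasmStack, in which case
// generation is delayed until the value is used.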
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
		s.PrepareCall(v)
		if v.Aux == gc.Deferreturn {
			// add a resume point before call to deferreturn so it can be called again via jmpdefer
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if sym, ok := v.Aux.(*obj.LSym); ok {
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
		} else {
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
		}

	case ssa.OpWasmLoweredMove:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}

	case ssa.OpWasmLoweredZero:
		getValue32(s, v.Args[0])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}

	case ssa.OpWasmLoweredNilCheck:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
		s.Prog(wasm.AEnd)
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(wasm.ACALLNORESUME) // TODO(neelance): If possible, turn this into a simple wasm.ACall.
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Op == ssa.OpWasmF32Store {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(storeOp(v.Type))
		gc.AddrAuto(&p.To, v)

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			s.OnWasmStackSkipped++
			// If a Value is marked OnWasmStack, we don't generate the value and store it to a register now.
			// Instead, we delay the generation to when the value is used and then directly generate it on the WebAssembly stack.
			return
		}
		ssaGenValueOnStack(s, v)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}

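// ssaGenValueOnStack emits the instructions for v so that its result is left
// on the WebAssembly operand stack instead of being written to a register.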
func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// Caller PC is stored 8 bytes below first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// Caller SP is the address of the first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			gc.AddAux(&p.From, v)
		case *gc.Node:
			p.From.Reg = v.Args[0].Reg()
			gc.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredRound32F:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AF32DemoteF64)
		s.Prog(wasm.AF64PromoteF32)

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue64(s, v.Args[2])
		s.Prog(wasm.AI32WrapI64)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
		if v.Op == ssa.OpWasmF32Load {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendUI32)

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU, ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendUI32)

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSF64:
		getValue64(s, v.Args[0])
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}

	case ssa.OpWasmI64TruncUF64:
		getValue64(s, v.Args[0])
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}

	case ssa.OpWasmF64Neg, ssa.OpWasmF64ConvertSI64, ssa.OpWasmF64ConvertUI64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		p := s.Prog(loadOp(v.Type))
		gc.AddrAuto(&p.From, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)

	}
}

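// getValue32 pushes v onto the WebAssembly stack as a 32-bit value,
// wrapping the 64-bit register contents if necessary (SP is already 32-bit).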
func getValue32(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		s.Prog(wasm.AI32WrapI64)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		s.Prog(wasm.AI32WrapI64)
	}
}

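// getValue64 pushes v onto the WebAssembly stack as a 64-bit value,
// zero-extending the 32-bit SP register if necessary.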
func getValue64(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendUI32)
	}
}

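// i32Const, i64Const, and f64Const push the given constant onto the
// WebAssembly stack.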
func i32Const(s *gc.SSAGenState, val int32) {
	p := s.Prog(wasm.AI32Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
}

func i64Const(s *gc.SSAGenState, val int64) {
	p := s.Prog(wasm.AI64Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
}

func f64Const(s *gc.SSAGenState, val float64) {
	p := s.Prog(wasm.AF64Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

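// getReg pushes the value of register reg onto the WebAssembly stack.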
func getReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.AGet)
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

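// setReg pops the top of the WebAssembly stack into register reg.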
func setReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.ASet)
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

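// loadOp returns the load instruction matching the size and signedness of type t.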
func loadOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Load
		case 8:
			return wasm.AF64Load
		default:
			panic("bad load type")
		}
	}

	switch t.Size() {
	case 1:
		if t.IsSigned() {
			return wasm.AI64Load8S
		}
		return wasm.AI64Load8U
	case 2:
		if t.IsSigned() {
			return wasm.AI64Load16S
		}
		return wasm.AI64Load16U
	case 4:
		if t.IsSigned() {
			return wasm.AI64Load32S
		}
		return wasm.AI64Load32U
	case 8:
		return wasm.AI64Load
	default:
		panic("bad load type")
	}
}

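// storeOp returns the store instruction matching the size of type t.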
func storeOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Store
		case 8:
			return wasm.AF64Store
		default:
			panic("bad store type")
		}
	}

	switch t.Size() {
	case 1:
		return wasm.AI64Store8
	case 2:
		return wasm.AI64Store16
	case 4:
		return wasm.AI64Store32
	case 8:
		return wasm.AI64Store
	default:
		panic("bad store type")
	}
}