github.com/megatontech/mynoteforgo@v0.0.0-20200507084910-5d0c6ea6e890/源码/cmd/compile/internal/wasm/ssa.go

// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package wasm

import (
	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/wasm"
)

// Init fills in the wasm-specific parts of the architecture descriptor:
// the link architecture, the stack pointer register, and the code
// generation hooks used by the SSA backend.
func Init(arch *gc.Arch) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.ZeroAuto = zeroAuto
	arch.Ginsnop = ginsnop
	arch.Ginsnopdefer = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}

// zeroRange zeroes cnt bytes of the stack frame starting at offset off
// from SP, one 8-byte store per iteration. cnt must be a multiple of the
// pointer width.
func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	for i := int64(0); i < cnt; i += 8 {
		p = pp.Appendpp(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}
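
// For illustration only: with off=16 and cnt=16, the loop above emits a
// sequence roughly like the following (assembler syntax is approximate):
//
//	Get SP
//	I64Const $0
//	I64Store $16
//	Get SP
//	I64Const $0
//	I64Store $24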

// zeroAuto zeroes the stack slot of the automatic variable n,
// 8 bytes at a time.
func zeroAuto(pp *gc.Progs, n *gc.Node) {
	sym := n.Sym.Linksym()
	size := n.Type.Size()
	for i := int64(0); i < size; i += 8 {
		p := pp.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_SP}

		p = pp.Prog(wasm.AI64Const)
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: 0}

		p = pp.Prog(wasm.AI64Store)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_AUTO, Offset: n.Xoffset + i, Sym: sym}
	}
}

// ginsnop emits a wasm no-op instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	return pp.Prog(wasm.ANop)
}

// ssaMarkMoves has nothing to do on wasm; it only satisfies the
// SSA backend hook interface.
func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
}

// ssaGenBlock lowers the control flow at the end of block b, branching to
// its successors or falling through to next where possible.
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	goToBlock := func(block *ssa.Block, canFallthrough bool) {
		if canFallthrough && block == next {
			return
		}
		s.Br(obj.AJMP, block)
	}

	switch b.Kind {
	case ssa.BlockPlain:
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockIf:
		getValue32(s, b.Control)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)

	case ssa.BlockExit:
		s.Prog(obj.AUNDEF)

	case ssa.BlockDefer:
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	default:
		panic("unexpected block")
	}

	// Entry point for the next block. Used by the JMP in goToBlock.
	s.Prog(wasm.ARESUMEPOINT)

	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}
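
// For illustration only: a BlockIf lowers to roughly the following shape
// (approximate syntax; <cond> stands for the code pushed by getValue32):
//
//	<cond>
//	I32Eqz
//	If
//	JMP false_block
//	End
//	JMP true_block        // omitted when true_block is the next block
//	RESUMEPOINT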

// ssaGenValue generates code for value v. Most operations are handled by
// ssaGenValueOnStack and then stored to v's register; the cases below need
// special treatment (calls, stores, write barriers, nil checks).
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
		s.PrepareCall(v)
		if v.Aux == gc.Deferreturn {
			// add a resume point before call to deferreturn so it can be called again via jmpdefer
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if sym, ok := v.Aux.(*obj.LSym); ok {
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
		} else {
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
		}

	case ssa.OpWasmLoweredMove:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}

	case ssa.OpWasmLoweredZero:
		getValue32(s, v.Args[0])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}

	case ssa.OpWasmLoweredNilCheck:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
		s.Prog(wasm.AEnd)
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(wasm.ACALLNORESUME) // TODO(neelance): If possible, turn this into a simple wasm.ACall.
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Op == ssa.OpWasmF32Store {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(storeOp(v.Type))
		gc.AddrAuto(&p.To, v)

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			s.OnWasmStackSkipped++
			// If a Value is marked OnWasmStack, we don't generate the value and store it to a register now.
			// Instead, we delay the generation to when the value is used and then directly generate it on the WebAssembly stack.
			return
		}
		ssaGenValueOnStack(s, v)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}
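
// For illustration only: in the default case above, a register-allocated
// value such as an i64 addition of two register-resident arguments comes out
// roughly as (approximate syntax, hypothetical registers R1-R3):
//
//	Get R1
//	Get R2
//	I64Add
//	Set R3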

// ssaGenValueOnStack generates the code for value v and leaves its result
// on the WebAssembly operand stack instead of storing it to a register.
func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// Caller PC is stored 8 bytes below first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// Caller SP is the address of the first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			gc.AddAux(&p.From, v)
		case *gc.Node:
			p.From.Reg = v.Args[0].Reg()
			gc.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredRound32F:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AF32DemoteF64)
		s.Prog(wasm.AF64PromoteF32)

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue64(s, v.Args[2])
		s.Prog(wasm.AI32WrapI64)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
		if v.Op == ssa.OpWasmF32Load {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendUI32)

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU, ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendUI32)

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSF64:
		getValue64(s, v.Args[0])
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}

	case ssa.OpWasmI64TruncUF64:
		getValue64(s, v.Args[0])
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}

	case ssa.OpWasmF64Neg, ssa.OpWasmF64ConvertSI64, ssa.OpWasmF64ConvertUI64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		p := s.Prog(loadOp(v.Type))
		gc.AddrAuto(&p.From, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)

	}
}
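
// A note on float32, for illustration only: on this port a float32 value is
// carried as a float64 on the wasm stack and in registers. The cases above
// make that visible: OpWasmF32Load promotes right after loading,
// OpWasmF32Store (and float32 OpStoreReg spills) demote right before
// storing, and LoweredRound32F rounds via a demote/promote round trip:
//
//	F32DemoteF64
//	F64PromoteF32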

// getValue32 pushes v onto the WebAssembly stack as a 32-bit value,
// wrapping it from i64 where necessary (SP is already 32-bit).
func getValue32(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		s.Prog(wasm.AI32WrapI64)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		s.Prog(wasm.AI32WrapI64)
	}
}

// getValue64 pushes v onto the WebAssembly stack as a 64-bit value,
// extending it from i32 when it comes from the 32-bit SP register.
func getValue64(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendUI32)
	}
}
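
// A note on the bookkeeping above: ssaGenValue increments OnWasmStackSkipped
// for every value whose generation it defers, and getValue32/getValue64
// decrement it when they finally materialize that value at its use site. If
// the counter is not back to zero at a block boundary, or after generating a
// register value, code generation panics with "wasm: bad stack".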

func i32Const(s *gc.SSAGenState, val int32) {
	p := s.Prog(wasm.AI32Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
}

func i64Const(s *gc.SSAGenState, val int64) {
	p := s.Prog(wasm.AI64Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
}

func f64Const(s *gc.SSAGenState, val float64) {
	p := s.Prog(wasm.AF64Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

func getReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.AGet)
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

func setReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.ASet)
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

// loadOp returns the wasm load instruction to use for a value of type t,
// based on its size and signedness.
func loadOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Load
		case 8:
			return wasm.AF64Load
		default:
			panic("bad load type")
		}
	}

	switch t.Size() {
	case 1:
		if t.IsSigned() {
			return wasm.AI64Load8S
		}
		return wasm.AI64Load8U
	case 2:
		if t.IsSigned() {
			return wasm.AI64Load16S
		}
		return wasm.AI64Load16U
	case 4:
		if t.IsSigned() {
			return wasm.AI64Load32S
		}
		return wasm.AI64Load32U
	case 8:
		return wasm.AI64Load
	default:
		panic("bad load type")
	}
}

// storeOp returns the wasm store instruction to use for a value of type t,
// based on its size.
func storeOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Store
		case 8:
			return wasm.AF64Store
		default:
			panic("bad store type")
		}
	}

	switch t.Size() {
	case 1:
		return wasm.AI64Store8
	case 2:
		return wasm.AI64Store16
	case 4:
		return wasm.AI64Store32
	case 8:
		return wasm.AI64Store
	default:
		panic("bad store type")
	}
}