github.com/tetratelabs/wazero@v1.7.3-0.20240513003603-48f702e154b5/internal/engine/wazevo/frontend/lower.go (about) 1 package frontend 2 3 import ( 4 "encoding/binary" 5 "fmt" 6 "math" 7 "runtime" 8 "strings" 9 10 "github.com/tetratelabs/wazero/api" 11 "github.com/tetratelabs/wazero/internal/engine/wazevo/ssa" 12 "github.com/tetratelabs/wazero/internal/engine/wazevo/wazevoapi" 13 "github.com/tetratelabs/wazero/internal/leb128" 14 "github.com/tetratelabs/wazero/internal/wasm" 15 ) 16 17 type ( 18 // loweringState is used to keep the state of lowering. 19 loweringState struct { 20 // values holds the values on the Wasm stack. 21 values []ssa.Value 22 controlFrames []controlFrame 23 unreachable bool 24 unreachableDepth int 25 tmpForBrTable []uint32 26 pc int 27 } 28 controlFrame struct { 29 kind controlFrameKind 30 // originalStackLen holds the number of values on the Wasm stack 31 // when start executing this control frame minus params for the block. 32 originalStackLenWithoutParam int 33 // blk is the loop header if this is loop, and is the else-block if this is an if frame. 34 blk, 35 // followingBlock is the basic block we enter if we reach "end" of block. 36 followingBlock ssa.BasicBlock 37 blockType *wasm.FunctionType 38 // clonedArgs hold the arguments to Else block. 39 clonedArgs ssa.Values 40 } 41 42 controlFrameKind byte 43 ) 44 45 // String implements fmt.Stringer for debugging. 46 func (l *loweringState) String() string { 47 var str []string 48 for _, v := range l.values { 49 str = append(str, fmt.Sprintf("v%v", v.ID())) 50 } 51 var frames []string 52 for i := range l.controlFrames { 53 frames = append(frames, l.controlFrames[i].kind.String()) 54 } 55 return fmt.Sprintf("\n\tunreachable=%v(depth=%d)\n\tstack: %s\n\tcontrol frames: %s", 56 l.unreachable, l.unreachableDepth, 57 strings.Join(str, ", "), 58 strings.Join(frames, ", "), 59 ) 60 } 61 62 const ( 63 controlFrameKindFunction = iota + 1 64 controlFrameKindLoop 65 controlFrameKindIfWithElse 66 controlFrameKindIfWithoutElse 67 controlFrameKindBlock 68 ) 69 70 // String implements fmt.Stringer for debugging. 71 func (k controlFrameKind) String() string { 72 switch k { 73 case controlFrameKindFunction: 74 return "function" 75 case controlFrameKindLoop: 76 return "loop" 77 case controlFrameKindIfWithElse: 78 return "if_with_else" 79 case controlFrameKindIfWithoutElse: 80 return "if_without_else" 81 case controlFrameKindBlock: 82 return "block" 83 default: 84 panic(k) 85 } 86 } 87 88 // isLoop returns true if this is a loop frame. 89 func (ctrl *controlFrame) isLoop() bool { 90 return ctrl.kind == controlFrameKindLoop 91 } 92 93 // reset resets the state of loweringState for reuse. 94 func (l *loweringState) reset() { 95 l.values = l.values[:0] 96 l.controlFrames = l.controlFrames[:0] 97 l.pc = 0 98 l.unreachable = false 99 l.unreachableDepth = 0 100 } 101 102 func (l *loweringState) peek() (ret ssa.Value) { 103 tail := len(l.values) - 1 104 return l.values[tail] 105 } 106 107 func (l *loweringState) pop() (ret ssa.Value) { 108 tail := len(l.values) - 1 109 ret = l.values[tail] 110 l.values = l.values[:tail] 111 return 112 } 113 114 func (l *loweringState) push(ret ssa.Value) { 115 l.values = append(l.values, ret) 116 } 117 118 func (c *Compiler) nPeekDup(n int) ssa.Values { 119 if n == 0 { 120 return ssa.ValuesNil 121 } 122 123 l := c.state() 124 tail := len(l.values) 125 126 args := c.allocateVarLengthValues(n) 127 args = args.Append(c.ssaBuilder.VarLengthPool(), l.values[tail-n:tail]...) 128 return args 129 } 130 131 func (l *loweringState) ctrlPop() (ret controlFrame) { 132 tail := len(l.controlFrames) - 1 133 ret = l.controlFrames[tail] 134 l.controlFrames = l.controlFrames[:tail] 135 return 136 } 137 138 func (l *loweringState) ctrlPush(ret controlFrame) { 139 l.controlFrames = append(l.controlFrames, ret) 140 } 141 142 func (l *loweringState) ctrlPeekAt(n int) (ret *controlFrame) { 143 tail := len(l.controlFrames) - 1 144 return &l.controlFrames[tail-n] 145 } 146 147 // lowerBody lowers the body of the Wasm function to the SSA form. 148 func (c *Compiler) lowerBody(entryBlk ssa.BasicBlock) { 149 c.ssaBuilder.Seal(entryBlk) 150 151 if c.needListener { 152 c.callListenerBefore() 153 } 154 155 // Pushes the empty control frame which corresponds to the function return. 156 c.loweringState.ctrlPush(controlFrame{ 157 kind: controlFrameKindFunction, 158 blockType: c.wasmFunctionTyp, 159 followingBlock: c.ssaBuilder.ReturnBlock(), 160 }) 161 162 for c.loweringState.pc < len(c.wasmFunctionBody) { 163 blkBeforeLowering := c.ssaBuilder.CurrentBlock() 164 c.lowerCurrentOpcode() 165 blkAfterLowering := c.ssaBuilder.CurrentBlock() 166 if blkBeforeLowering != blkAfterLowering { 167 // In Wasm, once a block exits, that means we've done compiling the block. 168 // Therefore, we finalize the known bounds at the end of the block for the exiting block. 169 c.finalizeKnownSafeBoundsAtTheEndOfBlock(blkBeforeLowering.ID()) 170 // After that, we initialize the known bounds for the new compilation target block. 171 c.initializeCurrentBlockKnownBounds() 172 } 173 } 174 } 175 176 func (c *Compiler) state() *loweringState { 177 return &c.loweringState 178 } 179 180 func (c *Compiler) lowerCurrentOpcode() { 181 op := c.wasmFunctionBody[c.loweringState.pc] 182 183 if c.needSourceOffsetInfo { 184 c.ssaBuilder.SetCurrentSourceOffset( 185 ssa.SourceOffset(c.loweringState.pc) + ssa.SourceOffset(c.wasmFunctionBodyOffsetInCodeSection), 186 ) 187 } 188 189 builder := c.ssaBuilder 190 state := c.state() 191 switch op { 192 case wasm.OpcodeI32Const: 193 c := c.readI32s() 194 if state.unreachable { 195 break 196 } 197 198 iconst := builder.AllocateInstruction().AsIconst32(uint32(c)).Insert(builder) 199 value := iconst.Return() 200 state.push(value) 201 case wasm.OpcodeI64Const: 202 c := c.readI64s() 203 if state.unreachable { 204 break 205 } 206 iconst := builder.AllocateInstruction().AsIconst64(uint64(c)).Insert(builder) 207 value := iconst.Return() 208 state.push(value) 209 case wasm.OpcodeF32Const: 210 f32 := c.readF32() 211 if state.unreachable { 212 break 213 } 214 f32const := builder.AllocateInstruction(). 215 AsF32const(f32). 216 Insert(builder). 217 Return() 218 state.push(f32const) 219 case wasm.OpcodeF64Const: 220 f64 := c.readF64() 221 if state.unreachable { 222 break 223 } 224 f64const := builder.AllocateInstruction(). 225 AsF64const(f64). 226 Insert(builder). 227 Return() 228 state.push(f64const) 229 case wasm.OpcodeI32Add, wasm.OpcodeI64Add: 230 if state.unreachable { 231 break 232 } 233 y, x := state.pop(), state.pop() 234 iadd := builder.AllocateInstruction() 235 iadd.AsIadd(x, y) 236 builder.InsertInstruction(iadd) 237 value := iadd.Return() 238 state.push(value) 239 case wasm.OpcodeI32Sub, wasm.OpcodeI64Sub: 240 if state.unreachable { 241 break 242 } 243 y, x := state.pop(), state.pop() 244 isub := builder.AllocateInstruction() 245 isub.AsIsub(x, y) 246 builder.InsertInstruction(isub) 247 value := isub.Return() 248 state.push(value) 249 case wasm.OpcodeF32Add, wasm.OpcodeF64Add: 250 if state.unreachable { 251 break 252 } 253 y, x := state.pop(), state.pop() 254 iadd := builder.AllocateInstruction() 255 iadd.AsFadd(x, y) 256 builder.InsertInstruction(iadd) 257 value := iadd.Return() 258 state.push(value) 259 case wasm.OpcodeI32Mul, wasm.OpcodeI64Mul: 260 if state.unreachable { 261 break 262 } 263 y, x := state.pop(), state.pop() 264 imul := builder.AllocateInstruction() 265 imul.AsImul(x, y) 266 builder.InsertInstruction(imul) 267 value := imul.Return() 268 state.push(value) 269 case wasm.OpcodeF32Sub, wasm.OpcodeF64Sub: 270 if state.unreachable { 271 break 272 } 273 y, x := state.pop(), state.pop() 274 isub := builder.AllocateInstruction() 275 isub.AsFsub(x, y) 276 builder.InsertInstruction(isub) 277 value := isub.Return() 278 state.push(value) 279 case wasm.OpcodeF32Mul, wasm.OpcodeF64Mul: 280 if state.unreachable { 281 break 282 } 283 y, x := state.pop(), state.pop() 284 isub := builder.AllocateInstruction() 285 isub.AsFmul(x, y) 286 builder.InsertInstruction(isub) 287 value := isub.Return() 288 state.push(value) 289 case wasm.OpcodeF32Div, wasm.OpcodeF64Div: 290 if state.unreachable { 291 break 292 } 293 y, x := state.pop(), state.pop() 294 isub := builder.AllocateInstruction() 295 isub.AsFdiv(x, y) 296 builder.InsertInstruction(isub) 297 value := isub.Return() 298 state.push(value) 299 case wasm.OpcodeF32Max, wasm.OpcodeF64Max: 300 if state.unreachable { 301 break 302 } 303 y, x := state.pop(), state.pop() 304 isub := builder.AllocateInstruction() 305 isub.AsFmax(x, y) 306 builder.InsertInstruction(isub) 307 value := isub.Return() 308 state.push(value) 309 case wasm.OpcodeF32Min, wasm.OpcodeF64Min: 310 if state.unreachable { 311 break 312 } 313 y, x := state.pop(), state.pop() 314 isub := builder.AllocateInstruction() 315 isub.AsFmin(x, y) 316 builder.InsertInstruction(isub) 317 value := isub.Return() 318 state.push(value) 319 case wasm.OpcodeI64Extend8S: 320 if state.unreachable { 321 break 322 } 323 c.insertIntegerExtend(true, 8, 64) 324 case wasm.OpcodeI64Extend16S: 325 if state.unreachable { 326 break 327 } 328 c.insertIntegerExtend(true, 16, 64) 329 case wasm.OpcodeI64Extend32S, wasm.OpcodeI64ExtendI32S: 330 if state.unreachable { 331 break 332 } 333 c.insertIntegerExtend(true, 32, 64) 334 case wasm.OpcodeI64ExtendI32U: 335 if state.unreachable { 336 break 337 } 338 c.insertIntegerExtend(false, 32, 64) 339 case wasm.OpcodeI32Extend8S: 340 if state.unreachable { 341 break 342 } 343 c.insertIntegerExtend(true, 8, 32) 344 case wasm.OpcodeI32Extend16S: 345 if state.unreachable { 346 break 347 } 348 c.insertIntegerExtend(true, 16, 32) 349 case wasm.OpcodeI32Eqz, wasm.OpcodeI64Eqz: 350 if state.unreachable { 351 break 352 } 353 x := state.pop() 354 zero := builder.AllocateInstruction() 355 if op == wasm.OpcodeI32Eqz { 356 zero.AsIconst32(0) 357 } else { 358 zero.AsIconst64(0) 359 } 360 builder.InsertInstruction(zero) 361 icmp := builder.AllocateInstruction(). 362 AsIcmp(x, zero.Return(), ssa.IntegerCmpCondEqual). 363 Insert(builder). 364 Return() 365 state.push(icmp) 366 case wasm.OpcodeI32Eq, wasm.OpcodeI64Eq: 367 if state.unreachable { 368 break 369 } 370 c.insertIcmp(ssa.IntegerCmpCondEqual) 371 case wasm.OpcodeI32Ne, wasm.OpcodeI64Ne: 372 if state.unreachable { 373 break 374 } 375 c.insertIcmp(ssa.IntegerCmpCondNotEqual) 376 case wasm.OpcodeI32LtS, wasm.OpcodeI64LtS: 377 if state.unreachable { 378 break 379 } 380 c.insertIcmp(ssa.IntegerCmpCondSignedLessThan) 381 case wasm.OpcodeI32LtU, wasm.OpcodeI64LtU: 382 if state.unreachable { 383 break 384 } 385 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThan) 386 case wasm.OpcodeI32GtS, wasm.OpcodeI64GtS: 387 if state.unreachable { 388 break 389 } 390 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThan) 391 case wasm.OpcodeI32GtU, wasm.OpcodeI64GtU: 392 if state.unreachable { 393 break 394 } 395 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThan) 396 case wasm.OpcodeI32LeS, wasm.OpcodeI64LeS: 397 if state.unreachable { 398 break 399 } 400 c.insertIcmp(ssa.IntegerCmpCondSignedLessThanOrEqual) 401 case wasm.OpcodeI32LeU, wasm.OpcodeI64LeU: 402 if state.unreachable { 403 break 404 } 405 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThanOrEqual) 406 case wasm.OpcodeI32GeS, wasm.OpcodeI64GeS: 407 if state.unreachable { 408 break 409 } 410 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThanOrEqual) 411 case wasm.OpcodeI32GeU, wasm.OpcodeI64GeU: 412 if state.unreachable { 413 break 414 } 415 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThanOrEqual) 416 417 case wasm.OpcodeF32Eq, wasm.OpcodeF64Eq: 418 if state.unreachable { 419 break 420 } 421 c.insertFcmp(ssa.FloatCmpCondEqual) 422 case wasm.OpcodeF32Ne, wasm.OpcodeF64Ne: 423 if state.unreachable { 424 break 425 } 426 c.insertFcmp(ssa.FloatCmpCondNotEqual) 427 case wasm.OpcodeF32Lt, wasm.OpcodeF64Lt: 428 if state.unreachable { 429 break 430 } 431 c.insertFcmp(ssa.FloatCmpCondLessThan) 432 case wasm.OpcodeF32Gt, wasm.OpcodeF64Gt: 433 if state.unreachable { 434 break 435 } 436 c.insertFcmp(ssa.FloatCmpCondGreaterThan) 437 case wasm.OpcodeF32Le, wasm.OpcodeF64Le: 438 if state.unreachable { 439 break 440 } 441 c.insertFcmp(ssa.FloatCmpCondLessThanOrEqual) 442 case wasm.OpcodeF32Ge, wasm.OpcodeF64Ge: 443 if state.unreachable { 444 break 445 } 446 c.insertFcmp(ssa.FloatCmpCondGreaterThanOrEqual) 447 case wasm.OpcodeF32Neg, wasm.OpcodeF64Neg: 448 if state.unreachable { 449 break 450 } 451 x := state.pop() 452 v := builder.AllocateInstruction().AsFneg(x).Insert(builder).Return() 453 state.push(v) 454 case wasm.OpcodeF32Sqrt, wasm.OpcodeF64Sqrt: 455 if state.unreachable { 456 break 457 } 458 x := state.pop() 459 v := builder.AllocateInstruction().AsSqrt(x).Insert(builder).Return() 460 state.push(v) 461 case wasm.OpcodeF32Abs, wasm.OpcodeF64Abs: 462 if state.unreachable { 463 break 464 } 465 x := state.pop() 466 v := builder.AllocateInstruction().AsFabs(x).Insert(builder).Return() 467 state.push(v) 468 case wasm.OpcodeF32Copysign, wasm.OpcodeF64Copysign: 469 if state.unreachable { 470 break 471 } 472 y, x := state.pop(), state.pop() 473 v := builder.AllocateInstruction().AsFcopysign(x, y).Insert(builder).Return() 474 state.push(v) 475 476 case wasm.OpcodeF32Ceil, wasm.OpcodeF64Ceil: 477 if state.unreachable { 478 break 479 } 480 x := state.pop() 481 v := builder.AllocateInstruction().AsCeil(x).Insert(builder).Return() 482 state.push(v) 483 case wasm.OpcodeF32Floor, wasm.OpcodeF64Floor: 484 if state.unreachable { 485 break 486 } 487 x := state.pop() 488 v := builder.AllocateInstruction().AsFloor(x).Insert(builder).Return() 489 state.push(v) 490 case wasm.OpcodeF32Trunc, wasm.OpcodeF64Trunc: 491 if state.unreachable { 492 break 493 } 494 x := state.pop() 495 v := builder.AllocateInstruction().AsTrunc(x).Insert(builder).Return() 496 state.push(v) 497 case wasm.OpcodeF32Nearest, wasm.OpcodeF64Nearest: 498 if state.unreachable { 499 break 500 } 501 x := state.pop() 502 v := builder.AllocateInstruction().AsNearest(x).Insert(builder).Return() 503 state.push(v) 504 case wasm.OpcodeI64TruncF64S, wasm.OpcodeI64TruncF32S, 505 wasm.OpcodeI32TruncF64S, wasm.OpcodeI32TruncF32S, 506 wasm.OpcodeI64TruncF64U, wasm.OpcodeI64TruncF32U, 507 wasm.OpcodeI32TruncF64U, wasm.OpcodeI32TruncF32U: 508 if state.unreachable { 509 break 510 } 511 ret := builder.AllocateInstruction().AsFcvtToInt( 512 state.pop(), 513 c.execCtxPtrValue, 514 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI32TruncF32S || op == wasm.OpcodeI32TruncF64S, 515 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI64TruncF64U || op == wasm.OpcodeI64TruncF32U, 516 false, 517 ).Insert(builder).Return() 518 state.push(ret) 519 case wasm.OpcodeMiscPrefix: 520 state.pc++ 521 // A misc opcode is encoded as an unsigned variable 32-bit integer. 522 miscOpUint, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc:]) 523 if err != nil { 524 // In normal conditions this should never happen because the function has passed validation. 525 panic(fmt.Sprintf("failed to read misc opcode: %v", err)) 526 } 527 state.pc += int(num - 1) 528 miscOp := wasm.OpcodeMisc(miscOpUint) 529 switch miscOp { 530 case wasm.OpcodeMiscI64TruncSatF64S, wasm.OpcodeMiscI64TruncSatF32S, 531 wasm.OpcodeMiscI32TruncSatF64S, wasm.OpcodeMiscI32TruncSatF32S, 532 wasm.OpcodeMiscI64TruncSatF64U, wasm.OpcodeMiscI64TruncSatF32U, 533 wasm.OpcodeMiscI32TruncSatF64U, wasm.OpcodeMiscI32TruncSatF32U: 534 if state.unreachable { 535 break 536 } 537 ret := builder.AllocateInstruction().AsFcvtToInt( 538 state.pop(), 539 c.execCtxPtrValue, 540 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF64S, 541 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI64TruncSatF64U || miscOp == wasm.OpcodeMiscI64TruncSatF32U, 542 true, 543 ).Insert(builder).Return() 544 state.push(ret) 545 546 case wasm.OpcodeMiscTableSize: 547 tableIndex := c.readI32u() 548 if state.unreachable { 549 break 550 } 551 552 // Load the table. 553 loadTableInstancePtr := builder.AllocateInstruction() 554 loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64) 555 builder.InsertInstruction(loadTableInstancePtr) 556 tableInstancePtr := loadTableInstancePtr.Return() 557 558 // Load the table's length. 559 loadTableLen := builder.AllocateInstruction(). 560 AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32). 561 Insert(builder) 562 state.push(loadTableLen.Return()) 563 564 case wasm.OpcodeMiscTableGrow: 565 tableIndex := c.readI32u() 566 if state.unreachable { 567 break 568 } 569 570 c.storeCallerModuleContext() 571 572 tableIndexVal := builder.AllocateInstruction().AsIconst32(tableIndex).Insert(builder).Return() 573 574 num := state.pop() 575 r := state.pop() 576 577 tableGrowPtr := builder.AllocateInstruction(). 578 AsLoad(c.execCtxPtrValue, 579 wazevoapi.ExecutionContextOffsetTableGrowTrampolineAddress.U32(), 580 ssa.TypeI64, 581 ).Insert(builder).Return() 582 583 args := c.allocateVarLengthValues(4, c.execCtxPtrValue, tableIndexVal, num, r) 584 callGrowRet := builder. 585 AllocateInstruction(). 586 AsCallIndirect(tableGrowPtr, &c.tableGrowSig, args). 587 Insert(builder).Return() 588 state.push(callGrowRet) 589 590 case wasm.OpcodeMiscTableCopy: 591 dstTableIndex := c.readI32u() 592 srcTableIndex := c.readI32u() 593 if state.unreachable { 594 break 595 } 596 597 copySize := builder. 598 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 599 srcOffset := builder. 600 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 601 dstOffset := builder. 602 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 603 604 // Out of bounds check. 605 dstTableInstancePtr := c.boundsCheckInTable(dstTableIndex, dstOffset, copySize) 606 srcTableInstancePtr := c.boundsCheckInTable(srcTableIndex, srcOffset, copySize) 607 608 dstTableBaseAddr := c.loadTableBaseAddr(dstTableInstancePtr) 609 srcTableBaseAddr := c.loadTableBaseAddr(srcTableInstancePtr) 610 611 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 612 613 dstOffsetInBytes := builder.AllocateInstruction().AsIshl(dstOffset, three).Insert(builder).Return() 614 dstAddr := builder.AllocateInstruction().AsIadd(dstTableBaseAddr, dstOffsetInBytes).Insert(builder).Return() 615 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(srcOffset, three).Insert(builder).Return() 616 srcAddr := builder.AllocateInstruction().AsIadd(srcTableBaseAddr, srcOffsetInBytes).Insert(builder).Return() 617 618 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 619 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 620 621 case wasm.OpcodeMiscMemoryCopy: 622 state.pc += 2 // +2 to skip two memory indexes which are fixed to zero. 623 if state.unreachable { 624 break 625 } 626 627 copySize := builder. 628 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 629 srcOffset := builder. 630 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 631 dstOffset := builder. 632 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 633 634 // Out of bounds check. 635 memLen := c.getMemoryLenValue(false) 636 c.boundsCheckInMemory(memLen, dstOffset, copySize) 637 c.boundsCheckInMemory(memLen, srcOffset, copySize) 638 639 memBase := c.getMemoryBaseValue(false) 640 dstAddr := builder.AllocateInstruction().AsIadd(memBase, dstOffset).Insert(builder).Return() 641 srcAddr := builder.AllocateInstruction().AsIadd(memBase, srcOffset).Insert(builder).Return() 642 643 c.callMemmove(dstAddr, srcAddr, copySize) 644 645 case wasm.OpcodeMiscTableFill: 646 tableIndex := c.readI32u() 647 if state.unreachable { 648 break 649 } 650 fillSize := state.pop() 651 value := state.pop() 652 offset := state.pop() 653 654 fillSizeExt := builder. 655 AllocateInstruction().AsUExtend(fillSize, 32, 64).Insert(builder).Return() 656 offsetExt := builder. 657 AllocateInstruction().AsUExtend(offset, 32, 64).Insert(builder).Return() 658 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetExt, fillSizeExt) 659 660 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 661 offsetInBytes := builder.AllocateInstruction().AsIshl(offsetExt, three).Insert(builder).Return() 662 fillSizeInBytes := builder.AllocateInstruction().AsIshl(fillSizeExt, three).Insert(builder).Return() 663 664 // Calculate the base address of the table. 665 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 666 addr := builder.AllocateInstruction().AsIadd(tableBaseAddr, offsetInBytes).Insert(builder).Return() 667 668 // Prepare the loop and following block. 669 beforeLoop := builder.AllocateBasicBlock() 670 loopBlk := builder.AllocateBasicBlock() 671 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 672 followingBlk := builder.AllocateBasicBlock() 673 674 // Uses the copy trick for faster filling buffer like memory.fill, but in this case we copy 8 bytes at a time. 675 // buf := memoryInst.Buffer[offset : offset+fillSize] 676 // buf[0:8] = value 677 // for i := 8; i < fillSize; i *= 2 { Begin with 8 bytes. 678 // copy(buf[i:], buf[:i]) 679 // } 680 681 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 682 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 683 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSizeExt, zero, ssa.IntegerCmpCondEqual). 684 Insert(builder).Return() 685 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, ssa.ValuesNil, followingBlk).Insert(builder) 686 c.insertJumpToBlock(ssa.ValuesNil, beforeLoop) 687 688 // buf[0:8] = value 689 builder.SetCurrentBlock(beforeLoop) 690 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, value, addr, 0).Insert(builder) 691 initValue := builder.AllocateInstruction().AsIconst64(8).Insert(builder).Return() 692 c.insertJumpToBlock(c.allocateVarLengthValues(1, initValue), loopBlk) 693 694 builder.SetCurrentBlock(loopBlk) 695 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 696 697 // If loopVar*2 > fillSizeInBytes, then count must be fillSizeInBytes-loopVar. 698 var count ssa.Value 699 { 700 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 701 loopVarDoubledLargerThanFillSize := builder. 702 AllocateInstruction().AsIcmp(loopVarDoubled, fillSizeInBytes, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 703 Insert(builder).Return() 704 diff := builder.AllocateInstruction().AsIsub(fillSizeInBytes, loopVar).Insert(builder).Return() 705 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 706 } 707 708 c.callMemmove(dstAddr, addr, count) 709 710 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 711 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 712 loopVarLessThanFillSize := builder.AllocateInstruction(). 713 AsIcmp(newLoopVar, fillSizeInBytes, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 714 715 builder.AllocateInstruction(). 716 AsBrnz(loopVarLessThanFillSize, c.allocateVarLengthValues(1, newLoopVar), loopBlk). 717 Insert(builder) 718 719 c.insertJumpToBlock(ssa.ValuesNil, followingBlk) 720 builder.SetCurrentBlock(followingBlk) 721 722 builder.Seal(beforeLoop) 723 builder.Seal(loopBlk) 724 builder.Seal(followingBlk) 725 726 case wasm.OpcodeMiscMemoryFill: 727 state.pc++ // Skip the memory index which is fixed to zero. 728 if state.unreachable { 729 break 730 } 731 732 fillSize := builder. 733 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 734 value := state.pop() 735 offset := builder. 736 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 737 738 // Out of bounds check. 739 c.boundsCheckInMemory(c.getMemoryLenValue(false), offset, fillSize) 740 741 // Calculate the base address: 742 addr := builder.AllocateInstruction().AsIadd(c.getMemoryBaseValue(false), offset).Insert(builder).Return() 743 744 // Uses the copy trick for faster filling buffer: https://gist.github.com/taylorza/df2f89d5f9ab3ffd06865062a4cf015d 745 // buf := memoryInst.Buffer[offset : offset+fillSize] 746 // buf[0] = value 747 // for i := 1; i < fillSize; i *= 2 { 748 // copy(buf[i:], buf[:i]) 749 // } 750 751 // Prepare the loop and following block. 752 beforeLoop := builder.AllocateBasicBlock() 753 loopBlk := builder.AllocateBasicBlock() 754 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 755 followingBlk := builder.AllocateBasicBlock() 756 757 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 758 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 759 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSize, zero, ssa.IntegerCmpCondEqual). 760 Insert(builder).Return() 761 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, ssa.ValuesNil, followingBlk).Insert(builder) 762 c.insertJumpToBlock(ssa.ValuesNil, beforeLoop) 763 764 // buf[0] = value 765 builder.SetCurrentBlock(beforeLoop) 766 builder.AllocateInstruction().AsStore(ssa.OpcodeIstore8, value, addr, 0).Insert(builder) 767 initValue := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 768 c.insertJumpToBlock(c.allocateVarLengthValues(1, initValue), loopBlk) 769 770 builder.SetCurrentBlock(loopBlk) 771 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 772 773 // If loopVar*2 > fillSizeExt, then count must be fillSizeExt-loopVar. 774 var count ssa.Value 775 { 776 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 777 loopVarDoubledLargerThanFillSize := builder. 778 AllocateInstruction().AsIcmp(loopVarDoubled, fillSize, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 779 Insert(builder).Return() 780 diff := builder.AllocateInstruction().AsIsub(fillSize, loopVar).Insert(builder).Return() 781 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 782 } 783 784 c.callMemmove(dstAddr, addr, count) 785 786 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 787 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 788 loopVarLessThanFillSize := builder.AllocateInstruction(). 789 AsIcmp(newLoopVar, fillSize, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 790 791 builder.AllocateInstruction(). 792 AsBrnz(loopVarLessThanFillSize, c.allocateVarLengthValues(1, newLoopVar), loopBlk). 793 Insert(builder) 794 795 c.insertJumpToBlock(ssa.ValuesNil, followingBlk) 796 builder.SetCurrentBlock(followingBlk) 797 798 builder.Seal(beforeLoop) 799 builder.Seal(loopBlk) 800 builder.Seal(followingBlk) 801 802 case wasm.OpcodeMiscMemoryInit: 803 index := c.readI32u() 804 state.pc++ // Skip the memory index which is fixed to zero. 805 if state.unreachable { 806 break 807 } 808 809 copySize := builder. 810 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 811 offsetInDataInstance := builder. 812 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 813 offsetInMemory := builder. 814 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 815 816 dataInstPtr := c.dataOrElementInstanceAddr(index, c.offset.DataInstances1stElement) 817 818 // Bounds check. 819 c.boundsCheckInMemory(c.getMemoryLenValue(false), offsetInMemory, copySize) 820 c.boundsCheckInDataOrElementInstance(dataInstPtr, offsetInDataInstance, copySize, wazevoapi.ExitCodeMemoryOutOfBounds) 821 822 dataInstBaseAddr := builder.AllocateInstruction().AsLoad(dataInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 823 srcAddr := builder.AllocateInstruction().AsIadd(dataInstBaseAddr, offsetInDataInstance).Insert(builder).Return() 824 825 memBase := c.getMemoryBaseValue(false) 826 dstAddr := builder.AllocateInstruction().AsIadd(memBase, offsetInMemory).Insert(builder).Return() 827 828 c.callMemmove(dstAddr, srcAddr, copySize) 829 830 case wasm.OpcodeMiscTableInit: 831 elemIndex := c.readI32u() 832 tableIndex := c.readI32u() 833 if state.unreachable { 834 break 835 } 836 837 copySize := builder. 838 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 839 offsetInElementInstance := builder. 840 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 841 offsetInTable := builder. 842 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 843 844 elemInstPtr := c.dataOrElementInstanceAddr(elemIndex, c.offset.ElementInstances1stElement) 845 846 // Bounds check. 847 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetInTable, copySize) 848 c.boundsCheckInDataOrElementInstance(elemInstPtr, offsetInElementInstance, copySize, wazevoapi.ExitCodeTableOutOfBounds) 849 850 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 851 // Calculates the destination address in the table. 852 tableOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInTable, three).Insert(builder).Return() 853 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 854 dstAddr := builder.AllocateInstruction().AsIadd(tableBaseAddr, tableOffsetInBytes).Insert(builder).Return() 855 856 // Calculates the source address in the element instance. 857 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInElementInstance, three).Insert(builder).Return() 858 elemInstBaseAddr := builder.AllocateInstruction().AsLoad(elemInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 859 srcAddr := builder.AllocateInstruction().AsIadd(elemInstBaseAddr, srcOffsetInBytes).Insert(builder).Return() 860 861 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 862 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 863 864 case wasm.OpcodeMiscElemDrop: 865 index := c.readI32u() 866 if state.unreachable { 867 break 868 } 869 870 c.dropDataOrElementInstance(index, c.offset.ElementInstances1stElement) 871 872 case wasm.OpcodeMiscDataDrop: 873 index := c.readI32u() 874 if state.unreachable { 875 break 876 } 877 c.dropDataOrElementInstance(index, c.offset.DataInstances1stElement) 878 879 default: 880 panic("Unknown MiscOp " + wasm.MiscInstructionName(miscOp)) 881 } 882 883 case wasm.OpcodeI32ReinterpretF32: 884 if state.unreachable { 885 break 886 } 887 reinterpret := builder.AllocateInstruction(). 888 AsBitcast(state.pop(), ssa.TypeI32). 889 Insert(builder).Return() 890 state.push(reinterpret) 891 892 case wasm.OpcodeI64ReinterpretF64: 893 if state.unreachable { 894 break 895 } 896 reinterpret := builder.AllocateInstruction(). 897 AsBitcast(state.pop(), ssa.TypeI64). 898 Insert(builder).Return() 899 state.push(reinterpret) 900 901 case wasm.OpcodeF32ReinterpretI32: 902 if state.unreachable { 903 break 904 } 905 reinterpret := builder.AllocateInstruction(). 906 AsBitcast(state.pop(), ssa.TypeF32). 907 Insert(builder).Return() 908 state.push(reinterpret) 909 910 case wasm.OpcodeF64ReinterpretI64: 911 if state.unreachable { 912 break 913 } 914 reinterpret := builder.AllocateInstruction(). 915 AsBitcast(state.pop(), ssa.TypeF64). 916 Insert(builder).Return() 917 state.push(reinterpret) 918 919 case wasm.OpcodeI32DivS, wasm.OpcodeI64DivS: 920 if state.unreachable { 921 break 922 } 923 y, x := state.pop(), state.pop() 924 result := builder.AllocateInstruction().AsSDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 925 state.push(result) 926 927 case wasm.OpcodeI32DivU, wasm.OpcodeI64DivU: 928 if state.unreachable { 929 break 930 } 931 y, x := state.pop(), state.pop() 932 result := builder.AllocateInstruction().AsUDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 933 state.push(result) 934 935 case wasm.OpcodeI32RemS, wasm.OpcodeI64RemS: 936 if state.unreachable { 937 break 938 } 939 y, x := state.pop(), state.pop() 940 result := builder.AllocateInstruction().AsSRem(x, y, c.execCtxPtrValue).Insert(builder).Return() 941 state.push(result) 942 943 case wasm.OpcodeI32RemU, wasm.OpcodeI64RemU: 944 if state.unreachable { 945 break 946 } 947 y, x := state.pop(), state.pop() 948 result := builder.AllocateInstruction().AsURem(x, y, c.execCtxPtrValue).Insert(builder).Return() 949 state.push(result) 950 951 case wasm.OpcodeI32And, wasm.OpcodeI64And: 952 if state.unreachable { 953 break 954 } 955 y, x := state.pop(), state.pop() 956 and := builder.AllocateInstruction() 957 and.AsBand(x, y) 958 builder.InsertInstruction(and) 959 value := and.Return() 960 state.push(value) 961 case wasm.OpcodeI32Or, wasm.OpcodeI64Or: 962 if state.unreachable { 963 break 964 } 965 y, x := state.pop(), state.pop() 966 or := builder.AllocateInstruction() 967 or.AsBor(x, y) 968 builder.InsertInstruction(or) 969 value := or.Return() 970 state.push(value) 971 case wasm.OpcodeI32Xor, wasm.OpcodeI64Xor: 972 if state.unreachable { 973 break 974 } 975 y, x := state.pop(), state.pop() 976 xor := builder.AllocateInstruction() 977 xor.AsBxor(x, y) 978 builder.InsertInstruction(xor) 979 value := xor.Return() 980 state.push(value) 981 case wasm.OpcodeI32Shl, wasm.OpcodeI64Shl: 982 if state.unreachable { 983 break 984 } 985 y, x := state.pop(), state.pop() 986 ishl := builder.AllocateInstruction() 987 ishl.AsIshl(x, y) 988 builder.InsertInstruction(ishl) 989 value := ishl.Return() 990 state.push(value) 991 case wasm.OpcodeI32ShrU, wasm.OpcodeI64ShrU: 992 if state.unreachable { 993 break 994 } 995 y, x := state.pop(), state.pop() 996 ishl := builder.AllocateInstruction() 997 ishl.AsUshr(x, y) 998 builder.InsertInstruction(ishl) 999 value := ishl.Return() 1000 state.push(value) 1001 case wasm.OpcodeI32ShrS, wasm.OpcodeI64ShrS: 1002 if state.unreachable { 1003 break 1004 } 1005 y, x := state.pop(), state.pop() 1006 ishl := builder.AllocateInstruction() 1007 ishl.AsSshr(x, y) 1008 builder.InsertInstruction(ishl) 1009 value := ishl.Return() 1010 state.push(value) 1011 case wasm.OpcodeI32Rotl, wasm.OpcodeI64Rotl: 1012 if state.unreachable { 1013 break 1014 } 1015 y, x := state.pop(), state.pop() 1016 rotl := builder.AllocateInstruction() 1017 rotl.AsRotl(x, y) 1018 builder.InsertInstruction(rotl) 1019 value := rotl.Return() 1020 state.push(value) 1021 case wasm.OpcodeI32Rotr, wasm.OpcodeI64Rotr: 1022 if state.unreachable { 1023 break 1024 } 1025 y, x := state.pop(), state.pop() 1026 rotr := builder.AllocateInstruction() 1027 rotr.AsRotr(x, y) 1028 builder.InsertInstruction(rotr) 1029 value := rotr.Return() 1030 state.push(value) 1031 case wasm.OpcodeI32Clz, wasm.OpcodeI64Clz: 1032 if state.unreachable { 1033 break 1034 } 1035 x := state.pop() 1036 clz := builder.AllocateInstruction() 1037 clz.AsClz(x) 1038 builder.InsertInstruction(clz) 1039 value := clz.Return() 1040 state.push(value) 1041 case wasm.OpcodeI32Ctz, wasm.OpcodeI64Ctz: 1042 if state.unreachable { 1043 break 1044 } 1045 x := state.pop() 1046 ctz := builder.AllocateInstruction() 1047 ctz.AsCtz(x) 1048 builder.InsertInstruction(ctz) 1049 value := ctz.Return() 1050 state.push(value) 1051 case wasm.OpcodeI32Popcnt, wasm.OpcodeI64Popcnt: 1052 if state.unreachable { 1053 break 1054 } 1055 x := state.pop() 1056 popcnt := builder.AllocateInstruction() 1057 popcnt.AsPopcnt(x) 1058 builder.InsertInstruction(popcnt) 1059 value := popcnt.Return() 1060 state.push(value) 1061 1062 case wasm.OpcodeI32WrapI64: 1063 if state.unreachable { 1064 break 1065 } 1066 x := state.pop() 1067 wrap := builder.AllocateInstruction().AsIreduce(x, ssa.TypeI32).Insert(builder).Return() 1068 state.push(wrap) 1069 case wasm.OpcodeGlobalGet: 1070 index := c.readI32u() 1071 if state.unreachable { 1072 break 1073 } 1074 v := c.getWasmGlobalValue(index, false) 1075 state.push(v) 1076 case wasm.OpcodeGlobalSet: 1077 index := c.readI32u() 1078 if state.unreachable { 1079 break 1080 } 1081 v := state.pop() 1082 c.setWasmGlobalValue(index, v) 1083 case wasm.OpcodeLocalGet: 1084 index := c.readI32u() 1085 if state.unreachable { 1086 break 1087 } 1088 variable := c.localVariable(index) 1089 if _, ok := c.m.NonStaticLocals[c.wasmLocalFunctionIndex][index]; ok { 1090 state.push(builder.MustFindValue(variable)) 1091 } else { 1092 // If a local is static, we can simply find it in the entry block which is either a function param 1093 // or a zero value. This fast pass helps to avoid the overhead of searching the entire function plus 1094 // avoid adding unnecessary block arguments. 1095 // TODO: I think this optimization should be done in a SSA pass like passRedundantPhiEliminationOpt, 1096 // but somehow there's some corner cases that it fails to optimize. 1097 state.push(builder.MustFindValueInBlk(variable, c.ssaBuilder.EntryBlock())) 1098 } 1099 case wasm.OpcodeLocalSet: 1100 index := c.readI32u() 1101 if state.unreachable { 1102 break 1103 } 1104 variable := c.localVariable(index) 1105 newValue := state.pop() 1106 builder.DefineVariableInCurrentBB(variable, newValue) 1107 1108 case wasm.OpcodeLocalTee: 1109 index := c.readI32u() 1110 if state.unreachable { 1111 break 1112 } 1113 variable := c.localVariable(index) 1114 newValue := state.peek() 1115 builder.DefineVariableInCurrentBB(variable, newValue) 1116 1117 case wasm.OpcodeSelect, wasm.OpcodeTypedSelect: 1118 if op == wasm.OpcodeTypedSelect { 1119 state.pc += 2 // ignores the type which is only needed during validation. 1120 } 1121 1122 if state.unreachable { 1123 break 1124 } 1125 1126 cond := state.pop() 1127 v2 := state.pop() 1128 v1 := state.pop() 1129 1130 sl := builder.AllocateInstruction(). 1131 AsSelect(cond, v1, v2). 1132 Insert(builder). 1133 Return() 1134 state.push(sl) 1135 1136 case wasm.OpcodeMemorySize: 1137 state.pc++ // skips the memory index. 1138 if state.unreachable { 1139 break 1140 } 1141 1142 var memSizeInBytes ssa.Value 1143 if c.offset.LocalMemoryBegin < 0 { 1144 memInstPtr := builder.AllocateInstruction(). 1145 AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64). 1146 Insert(builder). 1147 Return() 1148 1149 memSizeInBytes = builder.AllocateInstruction(). 1150 AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI32). 1151 Insert(builder). 1152 Return() 1153 } else { 1154 memSizeInBytes = builder.AllocateInstruction(). 1155 AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), ssa.TypeI32). 1156 Insert(builder). 1157 Return() 1158 } 1159 1160 amount := builder.AllocateInstruction() 1161 amount.AsIconst32(uint32(wasm.MemoryPageSizeInBits)) 1162 builder.InsertInstruction(amount) 1163 memSize := builder.AllocateInstruction(). 1164 AsUshr(memSizeInBytes, amount.Return()). 1165 Insert(builder). 1166 Return() 1167 state.push(memSize) 1168 1169 case wasm.OpcodeMemoryGrow: 1170 state.pc++ // skips the memory index. 1171 if state.unreachable { 1172 break 1173 } 1174 1175 c.storeCallerModuleContext() 1176 1177 pages := state.pop() 1178 memoryGrowPtr := builder.AllocateInstruction(). 1179 AsLoad(c.execCtxPtrValue, 1180 wazevoapi.ExecutionContextOffsetMemoryGrowTrampolineAddress.U32(), 1181 ssa.TypeI64, 1182 ).Insert(builder).Return() 1183 1184 args := c.allocateVarLengthValues(1, c.execCtxPtrValue, pages) 1185 callGrowRet := builder. 1186 AllocateInstruction(). 1187 AsCallIndirect(memoryGrowPtr, &c.memoryGrowSig, args). 1188 Insert(builder).Return() 1189 state.push(callGrowRet) 1190 1191 // After the memory grow, reload the cached memory base and len. 1192 c.reloadMemoryBaseLen() 1193 1194 case wasm.OpcodeI32Store, 1195 wasm.OpcodeI64Store, 1196 wasm.OpcodeF32Store, 1197 wasm.OpcodeF64Store, 1198 wasm.OpcodeI32Store8, 1199 wasm.OpcodeI32Store16, 1200 wasm.OpcodeI64Store8, 1201 wasm.OpcodeI64Store16, 1202 wasm.OpcodeI64Store32: 1203 1204 _, offset := c.readMemArg() 1205 if state.unreachable { 1206 break 1207 } 1208 var opSize uint64 1209 var opcode ssa.Opcode 1210 switch op { 1211 case wasm.OpcodeI32Store, wasm.OpcodeF32Store: 1212 opcode = ssa.OpcodeStore 1213 opSize = 4 1214 case wasm.OpcodeI64Store, wasm.OpcodeF64Store: 1215 opcode = ssa.OpcodeStore 1216 opSize = 8 1217 case wasm.OpcodeI32Store8, wasm.OpcodeI64Store8: 1218 opcode = ssa.OpcodeIstore8 1219 opSize = 1 1220 case wasm.OpcodeI32Store16, wasm.OpcodeI64Store16: 1221 opcode = ssa.OpcodeIstore16 1222 opSize = 2 1223 case wasm.OpcodeI64Store32: 1224 opcode = ssa.OpcodeIstore32 1225 opSize = 4 1226 default: 1227 panic("BUG") 1228 } 1229 1230 value := state.pop() 1231 baseAddr := state.pop() 1232 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1233 builder.AllocateInstruction(). 1234 AsStore(opcode, value, addr, offset). 1235 Insert(builder) 1236 1237 case wasm.OpcodeI32Load, 1238 wasm.OpcodeI64Load, 1239 wasm.OpcodeF32Load, 1240 wasm.OpcodeF64Load, 1241 wasm.OpcodeI32Load8S, 1242 wasm.OpcodeI32Load8U, 1243 wasm.OpcodeI32Load16S, 1244 wasm.OpcodeI32Load16U, 1245 wasm.OpcodeI64Load8S, 1246 wasm.OpcodeI64Load8U, 1247 wasm.OpcodeI64Load16S, 1248 wasm.OpcodeI64Load16U, 1249 wasm.OpcodeI64Load32S, 1250 wasm.OpcodeI64Load32U: 1251 _, offset := c.readMemArg() 1252 if state.unreachable { 1253 break 1254 } 1255 1256 var opSize uint64 1257 switch op { 1258 case wasm.OpcodeI32Load, wasm.OpcodeF32Load: 1259 opSize = 4 1260 case wasm.OpcodeI64Load, wasm.OpcodeF64Load: 1261 opSize = 8 1262 case wasm.OpcodeI32Load8S, wasm.OpcodeI32Load8U: 1263 opSize = 1 1264 case wasm.OpcodeI32Load16S, wasm.OpcodeI32Load16U: 1265 opSize = 2 1266 case wasm.OpcodeI64Load8S, wasm.OpcodeI64Load8U: 1267 opSize = 1 1268 case wasm.OpcodeI64Load16S, wasm.OpcodeI64Load16U: 1269 opSize = 2 1270 case wasm.OpcodeI64Load32S, wasm.OpcodeI64Load32U: 1271 opSize = 4 1272 default: 1273 panic("BUG") 1274 } 1275 1276 baseAddr := state.pop() 1277 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1278 load := builder.AllocateInstruction() 1279 switch op { 1280 case wasm.OpcodeI32Load: 1281 load.AsLoad(addr, offset, ssa.TypeI32) 1282 case wasm.OpcodeI64Load: 1283 load.AsLoad(addr, offset, ssa.TypeI64) 1284 case wasm.OpcodeF32Load: 1285 load.AsLoad(addr, offset, ssa.TypeF32) 1286 case wasm.OpcodeF64Load: 1287 load.AsLoad(addr, offset, ssa.TypeF64) 1288 case wasm.OpcodeI32Load8S: 1289 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, false) 1290 case wasm.OpcodeI32Load8U: 1291 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, false) 1292 case wasm.OpcodeI32Load16S: 1293 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, false) 1294 case wasm.OpcodeI32Load16U: 1295 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, false) 1296 case wasm.OpcodeI64Load8S: 1297 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, true) 1298 case wasm.OpcodeI64Load8U: 1299 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, true) 1300 case wasm.OpcodeI64Load16S: 1301 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, true) 1302 case wasm.OpcodeI64Load16U: 1303 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, true) 1304 case wasm.OpcodeI64Load32S: 1305 load.AsExtLoad(ssa.OpcodeSload32, addr, offset, true) 1306 case wasm.OpcodeI64Load32U: 1307 load.AsExtLoad(ssa.OpcodeUload32, addr, offset, true) 1308 default: 1309 panic("BUG") 1310 } 1311 builder.InsertInstruction(load) 1312 state.push(load.Return()) 1313 case wasm.OpcodeBlock: 1314 // Note: we do not need to create a BB for this as that would always have only one predecessor 1315 // which is the current BB, and therefore it's always ok to merge them in any way. 1316 1317 bt := c.readBlockType() 1318 1319 if state.unreachable { 1320 state.unreachableDepth++ 1321 break 1322 } 1323 1324 followingBlk := builder.AllocateBasicBlock() 1325 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1326 1327 state.ctrlPush(controlFrame{ 1328 kind: controlFrameKindBlock, 1329 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1330 followingBlock: followingBlk, 1331 blockType: bt, 1332 }) 1333 case wasm.OpcodeLoop: 1334 bt := c.readBlockType() 1335 1336 if state.unreachable { 1337 state.unreachableDepth++ 1338 break 1339 } 1340 1341 loopHeader, afterLoopBlock := builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1342 c.addBlockParamsFromWasmTypes(bt.Params, loopHeader) 1343 c.addBlockParamsFromWasmTypes(bt.Results, afterLoopBlock) 1344 1345 originalLen := len(state.values) - len(bt.Params) 1346 state.ctrlPush(controlFrame{ 1347 originalStackLenWithoutParam: originalLen, 1348 kind: controlFrameKindLoop, 1349 blk: loopHeader, 1350 followingBlock: afterLoopBlock, 1351 blockType: bt, 1352 }) 1353 1354 args := c.allocateVarLengthValues(originalLen) 1355 args = args.Append(builder.VarLengthPool(), state.values[originalLen:]...) 1356 1357 // Insert the jump to the header of loop. 1358 br := builder.AllocateInstruction() 1359 br.AsJump(args, loopHeader) 1360 builder.InsertInstruction(br) 1361 1362 c.switchTo(originalLen, loopHeader) 1363 1364 if c.ensureTermination { 1365 checkModuleExitCodePtr := builder.AllocateInstruction(). 1366 AsLoad(c.execCtxPtrValue, 1367 wazevoapi.ExecutionContextOffsetCheckModuleExitCodeTrampolineAddress.U32(), 1368 ssa.TypeI64, 1369 ).Insert(builder).Return() 1370 1371 args := c.allocateVarLengthValues(1, c.execCtxPtrValue) 1372 builder.AllocateInstruction(). 1373 AsCallIndirect(checkModuleExitCodePtr, &c.checkModuleExitCodeSig, args). 1374 Insert(builder) 1375 } 1376 case wasm.OpcodeIf: 1377 bt := c.readBlockType() 1378 1379 if state.unreachable { 1380 state.unreachableDepth++ 1381 break 1382 } 1383 1384 v := state.pop() 1385 thenBlk, elseBlk, followingBlk := builder.AllocateBasicBlock(), builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1386 1387 // We do not make the Wasm-level block parameters as SSA-level block params for if-else blocks 1388 // since they won't be PHI and the definition is unique. 1389 1390 // On the other hand, the following block after if-else-end will likely have 1391 // multiple definitions (one in Then and another in Else blocks). 1392 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1393 1394 args := c.allocateVarLengthValues(len(bt.Params)) 1395 args = args.Append(builder.VarLengthPool(), state.values[len(state.values)-len(bt.Params):]...) 1396 1397 // Insert the conditional jump to the Else block. 1398 brz := builder.AllocateInstruction() 1399 brz.AsBrz(v, ssa.ValuesNil, elseBlk) 1400 builder.InsertInstruction(brz) 1401 1402 // Then, insert the jump to the Then block. 1403 br := builder.AllocateInstruction() 1404 br.AsJump(ssa.ValuesNil, thenBlk) 1405 builder.InsertInstruction(br) 1406 1407 state.ctrlPush(controlFrame{ 1408 kind: controlFrameKindIfWithoutElse, 1409 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1410 blk: elseBlk, 1411 followingBlock: followingBlk, 1412 blockType: bt, 1413 clonedArgs: args, 1414 }) 1415 1416 builder.SetCurrentBlock(thenBlk) 1417 1418 // Then and Else (if exists) have only one predecessor. 1419 builder.Seal(thenBlk) 1420 builder.Seal(elseBlk) 1421 case wasm.OpcodeElse: 1422 ifctrl := state.ctrlPeekAt(0) 1423 if unreachable := state.unreachable; unreachable && state.unreachableDepth > 0 { 1424 // If it is currently in unreachable and is a nested if, 1425 // we just remove the entire else block. 1426 break 1427 } 1428 1429 ifctrl.kind = controlFrameKindIfWithElse 1430 if !state.unreachable { 1431 // If this Then block is currently reachable, we have to insert the branching to the following BB. 1432 followingBlk := ifctrl.followingBlock // == the BB after if-then-else. 1433 args := c.nPeekDup(len(ifctrl.blockType.Results)) 1434 c.insertJumpToBlock(args, followingBlk) 1435 } else { 1436 state.unreachable = false 1437 } 1438 1439 // Reset the stack so that we can correctly handle the else block. 1440 state.values = state.values[:ifctrl.originalStackLenWithoutParam] 1441 elseBlk := ifctrl.blk 1442 for _, arg := range ifctrl.clonedArgs.View() { 1443 state.push(arg) 1444 } 1445 1446 builder.SetCurrentBlock(elseBlk) 1447 1448 case wasm.OpcodeEnd: 1449 if state.unreachableDepth > 0 { 1450 state.unreachableDepth-- 1451 break 1452 } 1453 1454 ctrl := state.ctrlPop() 1455 followingBlk := ctrl.followingBlock 1456 1457 unreachable := state.unreachable 1458 if !unreachable { 1459 // Top n-th args will be used as a result of the current control frame. 1460 args := c.nPeekDup(len(ctrl.blockType.Results)) 1461 1462 // Insert the unconditional branch to the target. 1463 c.insertJumpToBlock(args, followingBlk) 1464 } else { // recover from the unreachable state. 1465 state.unreachable = false 1466 } 1467 1468 switch ctrl.kind { 1469 case controlFrameKindFunction: 1470 break // This is the very end of function. 1471 case controlFrameKindLoop: 1472 // Loop header block can be reached from any br/br_table contained in the loop, 1473 // so now that we've reached End of it, we can seal it. 1474 builder.Seal(ctrl.blk) 1475 case controlFrameKindIfWithoutElse: 1476 // If this is the end of Then block, we have to emit the empty Else block. 1477 elseBlk := ctrl.blk 1478 builder.SetCurrentBlock(elseBlk) 1479 c.insertJumpToBlock(ctrl.clonedArgs, followingBlk) 1480 } 1481 1482 builder.Seal(followingBlk) 1483 1484 // Ready to start translating the following block. 1485 c.switchTo(ctrl.originalStackLenWithoutParam, followingBlk) 1486 1487 case wasm.OpcodeBr: 1488 labelIndex := c.readI32u() 1489 if state.unreachable { 1490 break 1491 } 1492 1493 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1494 args := c.nPeekDup(argNum) 1495 c.insertJumpToBlock(args, targetBlk) 1496 1497 state.unreachable = true 1498 1499 case wasm.OpcodeBrIf: 1500 labelIndex := c.readI32u() 1501 if state.unreachable { 1502 break 1503 } 1504 1505 v := state.pop() 1506 1507 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1508 args := c.nPeekDup(argNum) 1509 var sealTargetBlk bool 1510 if c.needListener && targetBlk.ReturnBlock() { // In this case, we have to call the listener before returning. 1511 // Save the currently active block. 1512 current := builder.CurrentBlock() 1513 1514 // Allocate the trampoline block to the return where we call the listener. 1515 targetBlk = builder.AllocateBasicBlock() 1516 builder.SetCurrentBlock(targetBlk) 1517 sealTargetBlk = true 1518 1519 c.callListenerAfter() 1520 1521 instr := builder.AllocateInstruction() 1522 instr.AsReturn(args) 1523 builder.InsertInstruction(instr) 1524 1525 args = ssa.ValuesNil 1526 1527 // Revert the current block. 1528 builder.SetCurrentBlock(current) 1529 } 1530 1531 // Insert the conditional jump to the target block. 1532 brnz := builder.AllocateInstruction() 1533 brnz.AsBrnz(v, args, targetBlk) 1534 builder.InsertInstruction(brnz) 1535 1536 if sealTargetBlk { 1537 builder.Seal(targetBlk) 1538 } 1539 1540 // Insert the unconditional jump to the Else block which corresponds to after br_if. 1541 elseBlk := builder.AllocateBasicBlock() 1542 c.insertJumpToBlock(ssa.ValuesNil, elseBlk) 1543 1544 // Now start translating the instructions after br_if. 1545 builder.Seal(elseBlk) // Else of br_if has the current block as the only one successor. 1546 builder.SetCurrentBlock(elseBlk) 1547 1548 case wasm.OpcodeBrTable: 1549 labels := state.tmpForBrTable 1550 labels = labels[:0] 1551 labelCount := c.readI32u() 1552 for i := 0; i < int(labelCount); i++ { 1553 labels = append(labels, c.readI32u()) 1554 } 1555 labels = append(labels, c.readI32u()) // default label. 1556 if state.unreachable { 1557 break 1558 } 1559 1560 index := state.pop() 1561 if labelCount == 0 { // If this br_table is empty, we can just emit the unconditional jump. 1562 targetBlk, argNum := state.brTargetArgNumFor(labels[0]) 1563 args := c.nPeekDup(argNum) 1564 c.insertJumpToBlock(args, targetBlk) 1565 } else { 1566 c.lowerBrTable(labels, index) 1567 } 1568 state.unreachable = true 1569 1570 case wasm.OpcodeNop: 1571 case wasm.OpcodeReturn: 1572 if state.unreachable { 1573 break 1574 } 1575 if c.needListener { 1576 c.callListenerAfter() 1577 } 1578 1579 results := c.nPeekDup(c.results()) 1580 instr := builder.AllocateInstruction() 1581 1582 instr.AsReturn(results) 1583 builder.InsertInstruction(instr) 1584 state.unreachable = true 1585 1586 case wasm.OpcodeUnreachable: 1587 if state.unreachable { 1588 break 1589 } 1590 exit := builder.AllocateInstruction() 1591 exit.AsExitWithCode(c.execCtxPtrValue, wazevoapi.ExitCodeUnreachable) 1592 builder.InsertInstruction(exit) 1593 state.unreachable = true 1594 1595 case wasm.OpcodeCallIndirect: 1596 typeIndex := c.readI32u() 1597 tableIndex := c.readI32u() 1598 if state.unreachable { 1599 break 1600 } 1601 c.lowerCallIndirect(typeIndex, tableIndex) 1602 1603 case wasm.OpcodeCall: 1604 fnIndex := c.readI32u() 1605 if state.unreachable { 1606 break 1607 } 1608 1609 var typIndex wasm.Index 1610 if fnIndex < c.m.ImportFunctionCount { 1611 // Before transfer the control to the callee, we have to store the current module's moduleContextPtr 1612 // into execContext.callerModuleContextPtr in case when the callee is a Go function. 1613 c.storeCallerModuleContext() 1614 var fi int 1615 for i := range c.m.ImportSection { 1616 imp := &c.m.ImportSection[i] 1617 if imp.Type == wasm.ExternTypeFunc { 1618 if fi == int(fnIndex) { 1619 typIndex = imp.DescFunc 1620 break 1621 } 1622 fi++ 1623 } 1624 } 1625 } else { 1626 typIndex = c.m.FunctionSection[fnIndex-c.m.ImportFunctionCount] 1627 } 1628 typ := &c.m.TypeSection[typIndex] 1629 1630 argN := len(typ.Params) 1631 tail := len(state.values) - argN 1632 vs := state.values[tail:] 1633 state.values = state.values[:tail] 1634 args := c.allocateVarLengthValues(2+len(vs), c.execCtxPtrValue) 1635 1636 sig := c.signatures[typ] 1637 call := builder.AllocateInstruction() 1638 if fnIndex >= c.m.ImportFunctionCount { 1639 args = args.Append(builder.VarLengthPool(), c.moduleCtxPtrValue) // This case the callee module is itself. 1640 args = args.Append(builder.VarLengthPool(), vs...) 1641 call.AsCall(FunctionIndexToFuncRef(fnIndex), sig, args) 1642 builder.InsertInstruction(call) 1643 } else { 1644 // This case we have to read the address of the imported function from the module context. 1645 moduleCtx := c.moduleCtxPtrValue 1646 loadFuncPtr, loadModuleCtxPtr := builder.AllocateInstruction(), builder.AllocateInstruction() 1647 funcPtrOffset, moduleCtxPtrOffset, _ := c.offset.ImportedFunctionOffset(fnIndex) 1648 loadFuncPtr.AsLoad(moduleCtx, funcPtrOffset.U32(), ssa.TypeI64) 1649 loadModuleCtxPtr.AsLoad(moduleCtx, moduleCtxPtrOffset.U32(), ssa.TypeI64) 1650 builder.InsertInstruction(loadFuncPtr) 1651 builder.InsertInstruction(loadModuleCtxPtr) 1652 1653 args = args.Append(builder.VarLengthPool(), loadModuleCtxPtr.Return()) 1654 args = args.Append(builder.VarLengthPool(), vs...) 1655 call.AsCallIndirect(loadFuncPtr.Return(), sig, args) 1656 builder.InsertInstruction(call) 1657 } 1658 1659 first, rest := call.Returns() 1660 if first.Valid() { 1661 state.push(first) 1662 } 1663 for _, v := range rest { 1664 state.push(v) 1665 } 1666 1667 c.reloadAfterCall() 1668 1669 case wasm.OpcodeDrop: 1670 if state.unreachable { 1671 break 1672 } 1673 _ = state.pop() 1674 case wasm.OpcodeF64ConvertI32S, wasm.OpcodeF64ConvertI64S, wasm.OpcodeF64ConvertI32U, wasm.OpcodeF64ConvertI64U: 1675 if state.unreachable { 1676 break 1677 } 1678 result := builder.AllocateInstruction().AsFcvtFromInt( 1679 state.pop(), 1680 op == wasm.OpcodeF64ConvertI32S || op == wasm.OpcodeF64ConvertI64S, 1681 true, 1682 ).Insert(builder).Return() 1683 state.push(result) 1684 case wasm.OpcodeF32ConvertI32S, wasm.OpcodeF32ConvertI64S, wasm.OpcodeF32ConvertI32U, wasm.OpcodeF32ConvertI64U: 1685 if state.unreachable { 1686 break 1687 } 1688 result := builder.AllocateInstruction().AsFcvtFromInt( 1689 state.pop(), 1690 op == wasm.OpcodeF32ConvertI32S || op == wasm.OpcodeF32ConvertI64S, 1691 false, 1692 ).Insert(builder).Return() 1693 state.push(result) 1694 case wasm.OpcodeF32DemoteF64: 1695 if state.unreachable { 1696 break 1697 } 1698 cvt := builder.AllocateInstruction() 1699 cvt.AsFdemote(state.pop()) 1700 builder.InsertInstruction(cvt) 1701 state.push(cvt.Return()) 1702 case wasm.OpcodeF64PromoteF32: 1703 if state.unreachable { 1704 break 1705 } 1706 cvt := builder.AllocateInstruction() 1707 cvt.AsFpromote(state.pop()) 1708 builder.InsertInstruction(cvt) 1709 state.push(cvt.Return()) 1710 1711 case wasm.OpcodeVecPrefix: 1712 state.pc++ 1713 vecOp := c.wasmFunctionBody[state.pc] 1714 switch vecOp { 1715 case wasm.OpcodeVecV128Const: 1716 state.pc++ 1717 lo := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1718 state.pc += 8 1719 hi := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1720 state.pc += 7 1721 if state.unreachable { 1722 break 1723 } 1724 ret := builder.AllocateInstruction().AsVconst(lo, hi).Insert(builder).Return() 1725 state.push(ret) 1726 case wasm.OpcodeVecV128Load: 1727 _, offset := c.readMemArg() 1728 if state.unreachable { 1729 break 1730 } 1731 baseAddr := state.pop() 1732 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1733 load := builder.AllocateInstruction() 1734 load.AsLoad(addr, offset, ssa.TypeV128) 1735 builder.InsertInstruction(load) 1736 state.push(load.Return()) 1737 case wasm.OpcodeVecV128Load8Lane, wasm.OpcodeVecV128Load16Lane, wasm.OpcodeVecV128Load32Lane: 1738 _, offset := c.readMemArg() 1739 state.pc++ 1740 if state.unreachable { 1741 break 1742 } 1743 var lane ssa.VecLane 1744 var loadOp ssa.Opcode 1745 var opSize uint64 1746 switch vecOp { 1747 case wasm.OpcodeVecV128Load8Lane: 1748 loadOp, lane, opSize = ssa.OpcodeUload8, ssa.VecLaneI8x16, 1 1749 case wasm.OpcodeVecV128Load16Lane: 1750 loadOp, lane, opSize = ssa.OpcodeUload16, ssa.VecLaneI16x8, 2 1751 case wasm.OpcodeVecV128Load32Lane: 1752 loadOp, lane, opSize = ssa.OpcodeUload32, ssa.VecLaneI32x4, 4 1753 } 1754 laneIndex := c.wasmFunctionBody[state.pc] 1755 vector := state.pop() 1756 baseAddr := state.pop() 1757 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1758 load := builder.AllocateInstruction(). 1759 AsExtLoad(loadOp, addr, offset, false). 1760 Insert(builder).Return() 1761 ret := builder.AllocateInstruction(). 1762 AsInsertlane(vector, load, laneIndex, lane). 1763 Insert(builder).Return() 1764 state.push(ret) 1765 case wasm.OpcodeVecV128Load64Lane: 1766 _, offset := c.readMemArg() 1767 state.pc++ 1768 if state.unreachable { 1769 break 1770 } 1771 laneIndex := c.wasmFunctionBody[state.pc] 1772 vector := state.pop() 1773 baseAddr := state.pop() 1774 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1775 load := builder.AllocateInstruction(). 1776 AsLoad(addr, offset, ssa.TypeI64). 1777 Insert(builder).Return() 1778 ret := builder.AllocateInstruction(). 1779 AsInsertlane(vector, load, laneIndex, ssa.VecLaneI64x2). 1780 Insert(builder).Return() 1781 state.push(ret) 1782 1783 case wasm.OpcodeVecV128Load32zero, wasm.OpcodeVecV128Load64zero: 1784 _, offset := c.readMemArg() 1785 if state.unreachable { 1786 break 1787 } 1788 1789 var scalarType ssa.Type 1790 switch vecOp { 1791 case wasm.OpcodeVecV128Load32zero: 1792 scalarType = ssa.TypeF32 1793 case wasm.OpcodeVecV128Load64zero: 1794 scalarType = ssa.TypeF64 1795 } 1796 1797 baseAddr := state.pop() 1798 addr := c.memOpSetup(baseAddr, uint64(offset), uint64(scalarType.Size())) 1799 1800 ret := builder.AllocateInstruction(). 1801 AsVZeroExtLoad(addr, offset, scalarType). 1802 Insert(builder).Return() 1803 state.push(ret) 1804 1805 case wasm.OpcodeVecV128Load8x8u, wasm.OpcodeVecV128Load8x8s, 1806 wasm.OpcodeVecV128Load16x4u, wasm.OpcodeVecV128Load16x4s, 1807 wasm.OpcodeVecV128Load32x2u, wasm.OpcodeVecV128Load32x2s: 1808 _, offset := c.readMemArg() 1809 if state.unreachable { 1810 break 1811 } 1812 var lane ssa.VecLane 1813 var signed bool 1814 switch vecOp { 1815 case wasm.OpcodeVecV128Load8x8s: 1816 signed = true 1817 fallthrough 1818 case wasm.OpcodeVecV128Load8x8u: 1819 lane = ssa.VecLaneI8x16 1820 case wasm.OpcodeVecV128Load16x4s: 1821 signed = true 1822 fallthrough 1823 case wasm.OpcodeVecV128Load16x4u: 1824 lane = ssa.VecLaneI16x8 1825 case wasm.OpcodeVecV128Load32x2s: 1826 signed = true 1827 fallthrough 1828 case wasm.OpcodeVecV128Load32x2u: 1829 lane = ssa.VecLaneI32x4 1830 } 1831 baseAddr := state.pop() 1832 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1833 load := builder.AllocateInstruction(). 1834 AsLoad(addr, offset, ssa.TypeF64). 1835 Insert(builder).Return() 1836 ret := builder.AllocateInstruction(). 1837 AsWiden(load, lane, signed, true). 1838 Insert(builder).Return() 1839 state.push(ret) 1840 case wasm.OpcodeVecV128Load8Splat, wasm.OpcodeVecV128Load16Splat, 1841 wasm.OpcodeVecV128Load32Splat, wasm.OpcodeVecV128Load64Splat: 1842 _, offset := c.readMemArg() 1843 if state.unreachable { 1844 break 1845 } 1846 var lane ssa.VecLane 1847 var opSize uint64 1848 switch vecOp { 1849 case wasm.OpcodeVecV128Load8Splat: 1850 lane, opSize = ssa.VecLaneI8x16, 1 1851 case wasm.OpcodeVecV128Load16Splat: 1852 lane, opSize = ssa.VecLaneI16x8, 2 1853 case wasm.OpcodeVecV128Load32Splat: 1854 lane, opSize = ssa.VecLaneI32x4, 4 1855 case wasm.OpcodeVecV128Load64Splat: 1856 lane, opSize = ssa.VecLaneI64x2, 8 1857 } 1858 baseAddr := state.pop() 1859 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1860 ret := builder.AllocateInstruction(). 1861 AsLoadSplat(addr, offset, lane). 1862 Insert(builder).Return() 1863 state.push(ret) 1864 case wasm.OpcodeVecV128Store: 1865 _, offset := c.readMemArg() 1866 if state.unreachable { 1867 break 1868 } 1869 value := state.pop() 1870 baseAddr := state.pop() 1871 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1872 builder.AllocateInstruction(). 1873 AsStore(ssa.OpcodeStore, value, addr, offset). 1874 Insert(builder) 1875 case wasm.OpcodeVecV128Store8Lane, wasm.OpcodeVecV128Store16Lane, 1876 wasm.OpcodeVecV128Store32Lane, wasm.OpcodeVecV128Store64Lane: 1877 _, offset := c.readMemArg() 1878 state.pc++ 1879 if state.unreachable { 1880 break 1881 } 1882 laneIndex := c.wasmFunctionBody[state.pc] 1883 var storeOp ssa.Opcode 1884 var lane ssa.VecLane 1885 var opSize uint64 1886 switch vecOp { 1887 case wasm.OpcodeVecV128Store8Lane: 1888 storeOp, lane, opSize = ssa.OpcodeIstore8, ssa.VecLaneI8x16, 1 1889 case wasm.OpcodeVecV128Store16Lane: 1890 storeOp, lane, opSize = ssa.OpcodeIstore16, ssa.VecLaneI16x8, 2 1891 case wasm.OpcodeVecV128Store32Lane: 1892 storeOp, lane, opSize = ssa.OpcodeIstore32, ssa.VecLaneI32x4, 4 1893 case wasm.OpcodeVecV128Store64Lane: 1894 storeOp, lane, opSize = ssa.OpcodeStore, ssa.VecLaneI64x2, 8 1895 } 1896 vector := state.pop() 1897 baseAddr := state.pop() 1898 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1899 value := builder.AllocateInstruction(). 1900 AsExtractlane(vector, laneIndex, lane, false). 1901 Insert(builder).Return() 1902 builder.AllocateInstruction(). 1903 AsStore(storeOp, value, addr, offset). 1904 Insert(builder) 1905 case wasm.OpcodeVecV128Not: 1906 if state.unreachable { 1907 break 1908 } 1909 v1 := state.pop() 1910 ret := builder.AllocateInstruction().AsVbnot(v1).Insert(builder).Return() 1911 state.push(ret) 1912 case wasm.OpcodeVecV128And: 1913 if state.unreachable { 1914 break 1915 } 1916 v2 := state.pop() 1917 v1 := state.pop() 1918 ret := builder.AllocateInstruction().AsVband(v1, v2).Insert(builder).Return() 1919 state.push(ret) 1920 case wasm.OpcodeVecV128AndNot: 1921 if state.unreachable { 1922 break 1923 } 1924 v2 := state.pop() 1925 v1 := state.pop() 1926 ret := builder.AllocateInstruction().AsVbandnot(v1, v2).Insert(builder).Return() 1927 state.push(ret) 1928 case wasm.OpcodeVecV128Or: 1929 if state.unreachable { 1930 break 1931 } 1932 v2 := state.pop() 1933 v1 := state.pop() 1934 ret := builder.AllocateInstruction().AsVbor(v1, v2).Insert(builder).Return() 1935 state.push(ret) 1936 case wasm.OpcodeVecV128Xor: 1937 if state.unreachable { 1938 break 1939 } 1940 v2 := state.pop() 1941 v1 := state.pop() 1942 ret := builder.AllocateInstruction().AsVbxor(v1, v2).Insert(builder).Return() 1943 state.push(ret) 1944 case wasm.OpcodeVecV128Bitselect: 1945 if state.unreachable { 1946 break 1947 } 1948 c := state.pop() 1949 v2 := state.pop() 1950 v1 := state.pop() 1951 ret := builder.AllocateInstruction().AsVbitselect(c, v1, v2).Insert(builder).Return() 1952 state.push(ret) 1953 case wasm.OpcodeVecV128AnyTrue: 1954 if state.unreachable { 1955 break 1956 } 1957 v1 := state.pop() 1958 ret := builder.AllocateInstruction().AsVanyTrue(v1).Insert(builder).Return() 1959 state.push(ret) 1960 case wasm.OpcodeVecI8x16AllTrue, wasm.OpcodeVecI16x8AllTrue, wasm.OpcodeVecI32x4AllTrue, wasm.OpcodeVecI64x2AllTrue: 1961 if state.unreachable { 1962 break 1963 } 1964 var lane ssa.VecLane 1965 switch vecOp { 1966 case wasm.OpcodeVecI8x16AllTrue: 1967 lane = ssa.VecLaneI8x16 1968 case wasm.OpcodeVecI16x8AllTrue: 1969 lane = ssa.VecLaneI16x8 1970 case wasm.OpcodeVecI32x4AllTrue: 1971 lane = ssa.VecLaneI32x4 1972 case wasm.OpcodeVecI64x2AllTrue: 1973 lane = ssa.VecLaneI64x2 1974 } 1975 v1 := state.pop() 1976 ret := builder.AllocateInstruction().AsVallTrue(v1, lane).Insert(builder).Return() 1977 state.push(ret) 1978 case wasm.OpcodeVecI8x16BitMask, wasm.OpcodeVecI16x8BitMask, wasm.OpcodeVecI32x4BitMask, wasm.OpcodeVecI64x2BitMask: 1979 if state.unreachable { 1980 break 1981 } 1982 var lane ssa.VecLane 1983 switch vecOp { 1984 case wasm.OpcodeVecI8x16BitMask: 1985 lane = ssa.VecLaneI8x16 1986 case wasm.OpcodeVecI16x8BitMask: 1987 lane = ssa.VecLaneI16x8 1988 case wasm.OpcodeVecI32x4BitMask: 1989 lane = ssa.VecLaneI32x4 1990 case wasm.OpcodeVecI64x2BitMask: 1991 lane = ssa.VecLaneI64x2 1992 } 1993 v1 := state.pop() 1994 ret := builder.AllocateInstruction().AsVhighBits(v1, lane).Insert(builder).Return() 1995 state.push(ret) 1996 case wasm.OpcodeVecI8x16Abs, wasm.OpcodeVecI16x8Abs, wasm.OpcodeVecI32x4Abs, wasm.OpcodeVecI64x2Abs: 1997 if state.unreachable { 1998 break 1999 } 2000 var lane ssa.VecLane 2001 switch vecOp { 2002 case wasm.OpcodeVecI8x16Abs: 2003 lane = ssa.VecLaneI8x16 2004 case wasm.OpcodeVecI16x8Abs: 2005 lane = ssa.VecLaneI16x8 2006 case wasm.OpcodeVecI32x4Abs: 2007 lane = ssa.VecLaneI32x4 2008 case wasm.OpcodeVecI64x2Abs: 2009 lane = ssa.VecLaneI64x2 2010 } 2011 v1 := state.pop() 2012 ret := builder.AllocateInstruction().AsVIabs(v1, lane).Insert(builder).Return() 2013 state.push(ret) 2014 case wasm.OpcodeVecI8x16Neg, wasm.OpcodeVecI16x8Neg, wasm.OpcodeVecI32x4Neg, wasm.OpcodeVecI64x2Neg: 2015 if state.unreachable { 2016 break 2017 } 2018 var lane ssa.VecLane 2019 switch vecOp { 2020 case wasm.OpcodeVecI8x16Neg: 2021 lane = ssa.VecLaneI8x16 2022 case wasm.OpcodeVecI16x8Neg: 2023 lane = ssa.VecLaneI16x8 2024 case wasm.OpcodeVecI32x4Neg: 2025 lane = ssa.VecLaneI32x4 2026 case wasm.OpcodeVecI64x2Neg: 2027 lane = ssa.VecLaneI64x2 2028 } 2029 v1 := state.pop() 2030 ret := builder.AllocateInstruction().AsVIneg(v1, lane).Insert(builder).Return() 2031 state.push(ret) 2032 case wasm.OpcodeVecI8x16Popcnt: 2033 if state.unreachable { 2034 break 2035 } 2036 lane := ssa.VecLaneI8x16 2037 v1 := state.pop() 2038 2039 ret := builder.AllocateInstruction().AsVIpopcnt(v1, lane).Insert(builder).Return() 2040 state.push(ret) 2041 case wasm.OpcodeVecI8x16Add, wasm.OpcodeVecI16x8Add, wasm.OpcodeVecI32x4Add, wasm.OpcodeVecI64x2Add: 2042 if state.unreachable { 2043 break 2044 } 2045 var lane ssa.VecLane 2046 switch vecOp { 2047 case wasm.OpcodeVecI8x16Add: 2048 lane = ssa.VecLaneI8x16 2049 case wasm.OpcodeVecI16x8Add: 2050 lane = ssa.VecLaneI16x8 2051 case wasm.OpcodeVecI32x4Add: 2052 lane = ssa.VecLaneI32x4 2053 case wasm.OpcodeVecI64x2Add: 2054 lane = ssa.VecLaneI64x2 2055 } 2056 v2 := state.pop() 2057 v1 := state.pop() 2058 ret := builder.AllocateInstruction().AsVIadd(v1, v2, lane).Insert(builder).Return() 2059 state.push(ret) 2060 case wasm.OpcodeVecI8x16AddSatS, wasm.OpcodeVecI16x8AddSatS: 2061 if state.unreachable { 2062 break 2063 } 2064 var lane ssa.VecLane 2065 switch vecOp { 2066 case wasm.OpcodeVecI8x16AddSatS: 2067 lane = ssa.VecLaneI8x16 2068 case wasm.OpcodeVecI16x8AddSatS: 2069 lane = ssa.VecLaneI16x8 2070 } 2071 v2 := state.pop() 2072 v1 := state.pop() 2073 ret := builder.AllocateInstruction().AsVSaddSat(v1, v2, lane).Insert(builder).Return() 2074 state.push(ret) 2075 case wasm.OpcodeVecI8x16AddSatU, wasm.OpcodeVecI16x8AddSatU: 2076 if state.unreachable { 2077 break 2078 } 2079 var lane ssa.VecLane 2080 switch vecOp { 2081 case wasm.OpcodeVecI8x16AddSatU: 2082 lane = ssa.VecLaneI8x16 2083 case wasm.OpcodeVecI16x8AddSatU: 2084 lane = ssa.VecLaneI16x8 2085 } 2086 v2 := state.pop() 2087 v1 := state.pop() 2088 ret := builder.AllocateInstruction().AsVUaddSat(v1, v2, lane).Insert(builder).Return() 2089 state.push(ret) 2090 case wasm.OpcodeVecI8x16SubSatS, wasm.OpcodeVecI16x8SubSatS: 2091 if state.unreachable { 2092 break 2093 } 2094 var lane ssa.VecLane 2095 switch vecOp { 2096 case wasm.OpcodeVecI8x16SubSatS: 2097 lane = ssa.VecLaneI8x16 2098 case wasm.OpcodeVecI16x8SubSatS: 2099 lane = ssa.VecLaneI16x8 2100 } 2101 v2 := state.pop() 2102 v1 := state.pop() 2103 ret := builder.AllocateInstruction().AsVSsubSat(v1, v2, lane).Insert(builder).Return() 2104 state.push(ret) 2105 case wasm.OpcodeVecI8x16SubSatU, wasm.OpcodeVecI16x8SubSatU: 2106 if state.unreachable { 2107 break 2108 } 2109 var lane ssa.VecLane 2110 switch vecOp { 2111 case wasm.OpcodeVecI8x16SubSatU: 2112 lane = ssa.VecLaneI8x16 2113 case wasm.OpcodeVecI16x8SubSatU: 2114 lane = ssa.VecLaneI16x8 2115 } 2116 v2 := state.pop() 2117 v1 := state.pop() 2118 ret := builder.AllocateInstruction().AsVUsubSat(v1, v2, lane).Insert(builder).Return() 2119 state.push(ret) 2120 2121 case wasm.OpcodeVecI8x16Sub, wasm.OpcodeVecI16x8Sub, wasm.OpcodeVecI32x4Sub, wasm.OpcodeVecI64x2Sub: 2122 if state.unreachable { 2123 break 2124 } 2125 var lane ssa.VecLane 2126 switch vecOp { 2127 case wasm.OpcodeVecI8x16Sub: 2128 lane = ssa.VecLaneI8x16 2129 case wasm.OpcodeVecI16x8Sub: 2130 lane = ssa.VecLaneI16x8 2131 case wasm.OpcodeVecI32x4Sub: 2132 lane = ssa.VecLaneI32x4 2133 case wasm.OpcodeVecI64x2Sub: 2134 lane = ssa.VecLaneI64x2 2135 } 2136 v2 := state.pop() 2137 v1 := state.pop() 2138 ret := builder.AllocateInstruction().AsVIsub(v1, v2, lane).Insert(builder).Return() 2139 state.push(ret) 2140 case wasm.OpcodeVecI8x16MinS, wasm.OpcodeVecI16x8MinS, wasm.OpcodeVecI32x4MinS: 2141 if state.unreachable { 2142 break 2143 } 2144 var lane ssa.VecLane 2145 switch vecOp { 2146 case wasm.OpcodeVecI8x16MinS: 2147 lane = ssa.VecLaneI8x16 2148 case wasm.OpcodeVecI16x8MinS: 2149 lane = ssa.VecLaneI16x8 2150 case wasm.OpcodeVecI32x4MinS: 2151 lane = ssa.VecLaneI32x4 2152 } 2153 v2 := state.pop() 2154 v1 := state.pop() 2155 ret := builder.AllocateInstruction().AsVImin(v1, v2, lane).Insert(builder).Return() 2156 state.push(ret) 2157 case wasm.OpcodeVecI8x16MinU, wasm.OpcodeVecI16x8MinU, wasm.OpcodeVecI32x4MinU: 2158 if state.unreachable { 2159 break 2160 } 2161 var lane ssa.VecLane 2162 switch vecOp { 2163 case wasm.OpcodeVecI8x16MinU: 2164 lane = ssa.VecLaneI8x16 2165 case wasm.OpcodeVecI16x8MinU: 2166 lane = ssa.VecLaneI16x8 2167 case wasm.OpcodeVecI32x4MinU: 2168 lane = ssa.VecLaneI32x4 2169 } 2170 v2 := state.pop() 2171 v1 := state.pop() 2172 ret := builder.AllocateInstruction().AsVUmin(v1, v2, lane).Insert(builder).Return() 2173 state.push(ret) 2174 case wasm.OpcodeVecI8x16MaxS, wasm.OpcodeVecI16x8MaxS, wasm.OpcodeVecI32x4MaxS: 2175 if state.unreachable { 2176 break 2177 } 2178 var lane ssa.VecLane 2179 switch vecOp { 2180 case wasm.OpcodeVecI8x16MaxS: 2181 lane = ssa.VecLaneI8x16 2182 case wasm.OpcodeVecI16x8MaxS: 2183 lane = ssa.VecLaneI16x8 2184 case wasm.OpcodeVecI32x4MaxS: 2185 lane = ssa.VecLaneI32x4 2186 } 2187 v2 := state.pop() 2188 v1 := state.pop() 2189 ret := builder.AllocateInstruction().AsVImax(v1, v2, lane).Insert(builder).Return() 2190 state.push(ret) 2191 case wasm.OpcodeVecI8x16MaxU, wasm.OpcodeVecI16x8MaxU, wasm.OpcodeVecI32x4MaxU: 2192 if state.unreachable { 2193 break 2194 } 2195 var lane ssa.VecLane 2196 switch vecOp { 2197 case wasm.OpcodeVecI8x16MaxU: 2198 lane = ssa.VecLaneI8x16 2199 case wasm.OpcodeVecI16x8MaxU: 2200 lane = ssa.VecLaneI16x8 2201 case wasm.OpcodeVecI32x4MaxU: 2202 lane = ssa.VecLaneI32x4 2203 } 2204 v2 := state.pop() 2205 v1 := state.pop() 2206 ret := builder.AllocateInstruction().AsVUmax(v1, v2, lane).Insert(builder).Return() 2207 state.push(ret) 2208 case wasm.OpcodeVecI8x16AvgrU, wasm.OpcodeVecI16x8AvgrU: 2209 if state.unreachable { 2210 break 2211 } 2212 var lane ssa.VecLane 2213 switch vecOp { 2214 case wasm.OpcodeVecI8x16AvgrU: 2215 lane = ssa.VecLaneI8x16 2216 case wasm.OpcodeVecI16x8AvgrU: 2217 lane = ssa.VecLaneI16x8 2218 } 2219 v2 := state.pop() 2220 v1 := state.pop() 2221 ret := builder.AllocateInstruction().AsVAvgRound(v1, v2, lane).Insert(builder).Return() 2222 state.push(ret) 2223 case wasm.OpcodeVecI16x8Mul, wasm.OpcodeVecI32x4Mul, wasm.OpcodeVecI64x2Mul: 2224 if state.unreachable { 2225 break 2226 } 2227 var lane ssa.VecLane 2228 switch vecOp { 2229 case wasm.OpcodeVecI16x8Mul: 2230 lane = ssa.VecLaneI16x8 2231 case wasm.OpcodeVecI32x4Mul: 2232 lane = ssa.VecLaneI32x4 2233 case wasm.OpcodeVecI64x2Mul: 2234 lane = ssa.VecLaneI64x2 2235 } 2236 v2 := state.pop() 2237 v1 := state.pop() 2238 ret := builder.AllocateInstruction().AsVImul(v1, v2, lane).Insert(builder).Return() 2239 state.push(ret) 2240 case wasm.OpcodeVecI16x8Q15mulrSatS: 2241 if state.unreachable { 2242 break 2243 } 2244 v2 := state.pop() 2245 v1 := state.pop() 2246 ret := builder.AllocateInstruction().AsSqmulRoundSat(v1, v2, ssa.VecLaneI16x8).Insert(builder).Return() 2247 state.push(ret) 2248 case wasm.OpcodeVecI8x16Eq, wasm.OpcodeVecI16x8Eq, wasm.OpcodeVecI32x4Eq, wasm.OpcodeVecI64x2Eq: 2249 if state.unreachable { 2250 break 2251 } 2252 var lane ssa.VecLane 2253 switch vecOp { 2254 case wasm.OpcodeVecI8x16Eq: 2255 lane = ssa.VecLaneI8x16 2256 case wasm.OpcodeVecI16x8Eq: 2257 lane = ssa.VecLaneI16x8 2258 case wasm.OpcodeVecI32x4Eq: 2259 lane = ssa.VecLaneI32x4 2260 case wasm.OpcodeVecI64x2Eq: 2261 lane = ssa.VecLaneI64x2 2262 } 2263 v2 := state.pop() 2264 v1 := state.pop() 2265 ret := builder.AllocateInstruction(). 2266 AsVIcmp(v1, v2, ssa.IntegerCmpCondEqual, lane).Insert(builder).Return() 2267 state.push(ret) 2268 case wasm.OpcodeVecI8x16Ne, wasm.OpcodeVecI16x8Ne, wasm.OpcodeVecI32x4Ne, wasm.OpcodeVecI64x2Ne: 2269 if state.unreachable { 2270 break 2271 } 2272 var lane ssa.VecLane 2273 switch vecOp { 2274 case wasm.OpcodeVecI8x16Ne: 2275 lane = ssa.VecLaneI8x16 2276 case wasm.OpcodeVecI16x8Ne: 2277 lane = ssa.VecLaneI16x8 2278 case wasm.OpcodeVecI32x4Ne: 2279 lane = ssa.VecLaneI32x4 2280 case wasm.OpcodeVecI64x2Ne: 2281 lane = ssa.VecLaneI64x2 2282 } 2283 v2 := state.pop() 2284 v1 := state.pop() 2285 ret := builder.AllocateInstruction(). 2286 AsVIcmp(v1, v2, ssa.IntegerCmpCondNotEqual, lane).Insert(builder).Return() 2287 state.push(ret) 2288 case wasm.OpcodeVecI8x16LtS, wasm.OpcodeVecI16x8LtS, wasm.OpcodeVecI32x4LtS, wasm.OpcodeVecI64x2LtS: 2289 if state.unreachable { 2290 break 2291 } 2292 var lane ssa.VecLane 2293 switch vecOp { 2294 case wasm.OpcodeVecI8x16LtS: 2295 lane = ssa.VecLaneI8x16 2296 case wasm.OpcodeVecI16x8LtS: 2297 lane = ssa.VecLaneI16x8 2298 case wasm.OpcodeVecI32x4LtS: 2299 lane = ssa.VecLaneI32x4 2300 case wasm.OpcodeVecI64x2LtS: 2301 lane = ssa.VecLaneI64x2 2302 } 2303 v2 := state.pop() 2304 v1 := state.pop() 2305 ret := builder.AllocateInstruction(). 2306 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThan, lane).Insert(builder).Return() 2307 state.push(ret) 2308 case wasm.OpcodeVecI8x16LtU, wasm.OpcodeVecI16x8LtU, wasm.OpcodeVecI32x4LtU: 2309 if state.unreachable { 2310 break 2311 } 2312 var lane ssa.VecLane 2313 switch vecOp { 2314 case wasm.OpcodeVecI8x16LtU: 2315 lane = ssa.VecLaneI8x16 2316 case wasm.OpcodeVecI16x8LtU: 2317 lane = ssa.VecLaneI16x8 2318 case wasm.OpcodeVecI32x4LtU: 2319 lane = ssa.VecLaneI32x4 2320 } 2321 v2 := state.pop() 2322 v1 := state.pop() 2323 ret := builder.AllocateInstruction(). 2324 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThan, lane).Insert(builder).Return() 2325 state.push(ret) 2326 case wasm.OpcodeVecI8x16LeS, wasm.OpcodeVecI16x8LeS, wasm.OpcodeVecI32x4LeS, wasm.OpcodeVecI64x2LeS: 2327 if state.unreachable { 2328 break 2329 } 2330 var lane ssa.VecLane 2331 switch vecOp { 2332 case wasm.OpcodeVecI8x16LeS: 2333 lane = ssa.VecLaneI8x16 2334 case wasm.OpcodeVecI16x8LeS: 2335 lane = ssa.VecLaneI16x8 2336 case wasm.OpcodeVecI32x4LeS: 2337 lane = ssa.VecLaneI32x4 2338 case wasm.OpcodeVecI64x2LeS: 2339 lane = ssa.VecLaneI64x2 2340 } 2341 v2 := state.pop() 2342 v1 := state.pop() 2343 ret := builder.AllocateInstruction(). 2344 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThanOrEqual, lane).Insert(builder).Return() 2345 state.push(ret) 2346 case wasm.OpcodeVecI8x16LeU, wasm.OpcodeVecI16x8LeU, wasm.OpcodeVecI32x4LeU: 2347 if state.unreachable { 2348 break 2349 } 2350 var lane ssa.VecLane 2351 switch vecOp { 2352 case wasm.OpcodeVecI8x16LeU: 2353 lane = ssa.VecLaneI8x16 2354 case wasm.OpcodeVecI16x8LeU: 2355 lane = ssa.VecLaneI16x8 2356 case wasm.OpcodeVecI32x4LeU: 2357 lane = ssa.VecLaneI32x4 2358 } 2359 v2 := state.pop() 2360 v1 := state.pop() 2361 ret := builder.AllocateInstruction(). 2362 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThanOrEqual, lane).Insert(builder).Return() 2363 state.push(ret) 2364 case wasm.OpcodeVecI8x16GtS, wasm.OpcodeVecI16x8GtS, wasm.OpcodeVecI32x4GtS, wasm.OpcodeVecI64x2GtS: 2365 if state.unreachable { 2366 break 2367 } 2368 var lane ssa.VecLane 2369 switch vecOp { 2370 case wasm.OpcodeVecI8x16GtS: 2371 lane = ssa.VecLaneI8x16 2372 case wasm.OpcodeVecI16x8GtS: 2373 lane = ssa.VecLaneI16x8 2374 case wasm.OpcodeVecI32x4GtS: 2375 lane = ssa.VecLaneI32x4 2376 case wasm.OpcodeVecI64x2GtS: 2377 lane = ssa.VecLaneI64x2 2378 } 2379 v2 := state.pop() 2380 v1 := state.pop() 2381 ret := builder.AllocateInstruction(). 2382 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThan, lane).Insert(builder).Return() 2383 state.push(ret) 2384 case wasm.OpcodeVecI8x16GtU, wasm.OpcodeVecI16x8GtU, wasm.OpcodeVecI32x4GtU: 2385 if state.unreachable { 2386 break 2387 } 2388 var lane ssa.VecLane 2389 switch vecOp { 2390 case wasm.OpcodeVecI8x16GtU: 2391 lane = ssa.VecLaneI8x16 2392 case wasm.OpcodeVecI16x8GtU: 2393 lane = ssa.VecLaneI16x8 2394 case wasm.OpcodeVecI32x4GtU: 2395 lane = ssa.VecLaneI32x4 2396 } 2397 v2 := state.pop() 2398 v1 := state.pop() 2399 ret := builder.AllocateInstruction(). 2400 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThan, lane).Insert(builder).Return() 2401 state.push(ret) 2402 case wasm.OpcodeVecI8x16GeS, wasm.OpcodeVecI16x8GeS, wasm.OpcodeVecI32x4GeS, wasm.OpcodeVecI64x2GeS: 2403 if state.unreachable { 2404 break 2405 } 2406 var lane ssa.VecLane 2407 switch vecOp { 2408 case wasm.OpcodeVecI8x16GeS: 2409 lane = ssa.VecLaneI8x16 2410 case wasm.OpcodeVecI16x8GeS: 2411 lane = ssa.VecLaneI16x8 2412 case wasm.OpcodeVecI32x4GeS: 2413 lane = ssa.VecLaneI32x4 2414 case wasm.OpcodeVecI64x2GeS: 2415 lane = ssa.VecLaneI64x2 2416 } 2417 v2 := state.pop() 2418 v1 := state.pop() 2419 ret := builder.AllocateInstruction(). 2420 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThanOrEqual, lane).Insert(builder).Return() 2421 state.push(ret) 2422 case wasm.OpcodeVecI8x16GeU, wasm.OpcodeVecI16x8GeU, wasm.OpcodeVecI32x4GeU: 2423 if state.unreachable { 2424 break 2425 } 2426 var lane ssa.VecLane 2427 switch vecOp { 2428 case wasm.OpcodeVecI8x16GeU: 2429 lane = ssa.VecLaneI8x16 2430 case wasm.OpcodeVecI16x8GeU: 2431 lane = ssa.VecLaneI16x8 2432 case wasm.OpcodeVecI32x4GeU: 2433 lane = ssa.VecLaneI32x4 2434 } 2435 v2 := state.pop() 2436 v1 := state.pop() 2437 ret := builder.AllocateInstruction(). 2438 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual, lane).Insert(builder).Return() 2439 state.push(ret) 2440 case wasm.OpcodeVecF32x4Max, wasm.OpcodeVecF64x2Max: 2441 if state.unreachable { 2442 break 2443 } 2444 var lane ssa.VecLane 2445 switch vecOp { 2446 case wasm.OpcodeVecF32x4Max: 2447 lane = ssa.VecLaneF32x4 2448 case wasm.OpcodeVecF64x2Max: 2449 lane = ssa.VecLaneF64x2 2450 } 2451 v2 := state.pop() 2452 v1 := state.pop() 2453 ret := builder.AllocateInstruction().AsVFmax(v1, v2, lane).Insert(builder).Return() 2454 state.push(ret) 2455 case wasm.OpcodeVecF32x4Abs, wasm.OpcodeVecF64x2Abs: 2456 if state.unreachable { 2457 break 2458 } 2459 var lane ssa.VecLane 2460 switch vecOp { 2461 case wasm.OpcodeVecF32x4Abs: 2462 lane = ssa.VecLaneF32x4 2463 case wasm.OpcodeVecF64x2Abs: 2464 lane = ssa.VecLaneF64x2 2465 } 2466 v1 := state.pop() 2467 ret := builder.AllocateInstruction().AsVFabs(v1, lane).Insert(builder).Return() 2468 state.push(ret) 2469 case wasm.OpcodeVecF32x4Min, wasm.OpcodeVecF64x2Min: 2470 if state.unreachable { 2471 break 2472 } 2473 var lane ssa.VecLane 2474 switch vecOp { 2475 case wasm.OpcodeVecF32x4Min: 2476 lane = ssa.VecLaneF32x4 2477 case wasm.OpcodeVecF64x2Min: 2478 lane = ssa.VecLaneF64x2 2479 } 2480 v2 := state.pop() 2481 v1 := state.pop() 2482 ret := builder.AllocateInstruction().AsVFmin(v1, v2, lane).Insert(builder).Return() 2483 state.push(ret) 2484 case wasm.OpcodeVecF32x4Neg, wasm.OpcodeVecF64x2Neg: 2485 if state.unreachable { 2486 break 2487 } 2488 var lane ssa.VecLane 2489 switch vecOp { 2490 case wasm.OpcodeVecF32x4Neg: 2491 lane = ssa.VecLaneF32x4 2492 case wasm.OpcodeVecF64x2Neg: 2493 lane = ssa.VecLaneF64x2 2494 } 2495 v1 := state.pop() 2496 ret := builder.AllocateInstruction().AsVFneg(v1, lane).Insert(builder).Return() 2497 state.push(ret) 2498 case wasm.OpcodeVecF32x4Sqrt, wasm.OpcodeVecF64x2Sqrt: 2499 if state.unreachable { 2500 break 2501 } 2502 var lane ssa.VecLane 2503 switch vecOp { 2504 case wasm.OpcodeVecF32x4Sqrt: 2505 lane = ssa.VecLaneF32x4 2506 case wasm.OpcodeVecF64x2Sqrt: 2507 lane = ssa.VecLaneF64x2 2508 } 2509 v1 := state.pop() 2510 ret := builder.AllocateInstruction().AsVSqrt(v1, lane).Insert(builder).Return() 2511 state.push(ret) 2512 2513 case wasm.OpcodeVecF32x4Add, wasm.OpcodeVecF64x2Add: 2514 if state.unreachable { 2515 break 2516 } 2517 var lane ssa.VecLane 2518 switch vecOp { 2519 case wasm.OpcodeVecF32x4Add: 2520 lane = ssa.VecLaneF32x4 2521 case wasm.OpcodeVecF64x2Add: 2522 lane = ssa.VecLaneF64x2 2523 } 2524 v2 := state.pop() 2525 v1 := state.pop() 2526 ret := builder.AllocateInstruction().AsVFadd(v1, v2, lane).Insert(builder).Return() 2527 state.push(ret) 2528 case wasm.OpcodeVecF32x4Sub, wasm.OpcodeVecF64x2Sub: 2529 if state.unreachable { 2530 break 2531 } 2532 var lane ssa.VecLane 2533 switch vecOp { 2534 case wasm.OpcodeVecF32x4Sub: 2535 lane = ssa.VecLaneF32x4 2536 case wasm.OpcodeVecF64x2Sub: 2537 lane = ssa.VecLaneF64x2 2538 } 2539 v2 := state.pop() 2540 v1 := state.pop() 2541 ret := builder.AllocateInstruction().AsVFsub(v1, v2, lane).Insert(builder).Return() 2542 state.push(ret) 2543 case wasm.OpcodeVecF32x4Mul, wasm.OpcodeVecF64x2Mul: 2544 if state.unreachable { 2545 break 2546 } 2547 var lane ssa.VecLane 2548 switch vecOp { 2549 case wasm.OpcodeVecF32x4Mul: 2550 lane = ssa.VecLaneF32x4 2551 case wasm.OpcodeVecF64x2Mul: 2552 lane = ssa.VecLaneF64x2 2553 } 2554 v2 := state.pop() 2555 v1 := state.pop() 2556 ret := builder.AllocateInstruction().AsVFmul(v1, v2, lane).Insert(builder).Return() 2557 state.push(ret) 2558 case wasm.OpcodeVecF32x4Div, wasm.OpcodeVecF64x2Div: 2559 if state.unreachable { 2560 break 2561 } 2562 var lane ssa.VecLane 2563 switch vecOp { 2564 case wasm.OpcodeVecF32x4Div: 2565 lane = ssa.VecLaneF32x4 2566 case wasm.OpcodeVecF64x2Div: 2567 lane = ssa.VecLaneF64x2 2568 } 2569 v2 := state.pop() 2570 v1 := state.pop() 2571 ret := builder.AllocateInstruction().AsVFdiv(v1, v2, lane).Insert(builder).Return() 2572 state.push(ret) 2573 2574 case wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S, wasm.OpcodeVecI16x8ExtaddPairwiseI8x16U: 2575 if state.unreachable { 2576 break 2577 } 2578 v := state.pop() 2579 signed := vecOp == wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S 2580 ret := builder.AllocateInstruction().AsExtIaddPairwise(v, ssa.VecLaneI8x16, signed).Insert(builder).Return() 2581 state.push(ret) 2582 2583 case wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S, wasm.OpcodeVecI32x4ExtaddPairwiseI16x8U: 2584 if state.unreachable { 2585 break 2586 } 2587 v := state.pop() 2588 signed := vecOp == wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S 2589 ret := builder.AllocateInstruction().AsExtIaddPairwise(v, ssa.VecLaneI16x8, signed).Insert(builder).Return() 2590 state.push(ret) 2591 2592 case wasm.OpcodeVecI16x8ExtMulLowI8x16S, wasm.OpcodeVecI16x8ExtMulLowI8x16U: 2593 if state.unreachable { 2594 break 2595 } 2596 v2 := state.pop() 2597 v1 := state.pop() 2598 ret := c.lowerExtMul( 2599 v1, v2, 2600 ssa.VecLaneI8x16, ssa.VecLaneI16x8, 2601 vecOp == wasm.OpcodeVecI16x8ExtMulLowI8x16S, true) 2602 state.push(ret) 2603 2604 case wasm.OpcodeVecI16x8ExtMulHighI8x16S, wasm.OpcodeVecI16x8ExtMulHighI8x16U: 2605 if state.unreachable { 2606 break 2607 } 2608 v2 := state.pop() 2609 v1 := state.pop() 2610 ret := c.lowerExtMul( 2611 v1, v2, 2612 ssa.VecLaneI8x16, ssa.VecLaneI16x8, 2613 vecOp == wasm.OpcodeVecI16x8ExtMulHighI8x16S, false) 2614 state.push(ret) 2615 2616 case wasm.OpcodeVecI32x4ExtMulLowI16x8S, wasm.OpcodeVecI32x4ExtMulLowI16x8U: 2617 if state.unreachable { 2618 break 2619 } 2620 v2 := state.pop() 2621 v1 := state.pop() 2622 ret := c.lowerExtMul( 2623 v1, v2, 2624 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2625 vecOp == wasm.OpcodeVecI32x4ExtMulLowI16x8S, true) 2626 state.push(ret) 2627 2628 case wasm.OpcodeVecI32x4ExtMulHighI16x8S, wasm.OpcodeVecI32x4ExtMulHighI16x8U: 2629 if state.unreachable { 2630 break 2631 } 2632 v2 := state.pop() 2633 v1 := state.pop() 2634 ret := c.lowerExtMul( 2635 v1, v2, 2636 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2637 vecOp == wasm.OpcodeVecI32x4ExtMulHighI16x8S, false) 2638 state.push(ret) 2639 case wasm.OpcodeVecI64x2ExtMulLowI32x4S, wasm.OpcodeVecI64x2ExtMulLowI32x4U: 2640 if state.unreachable { 2641 break 2642 } 2643 v2 := state.pop() 2644 v1 := state.pop() 2645 ret := c.lowerExtMul( 2646 v1, v2, 2647 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2648 vecOp == wasm.OpcodeVecI64x2ExtMulLowI32x4S, true) 2649 state.push(ret) 2650 2651 case wasm.OpcodeVecI64x2ExtMulHighI32x4S, wasm.OpcodeVecI64x2ExtMulHighI32x4U: 2652 if state.unreachable { 2653 break 2654 } 2655 v2 := state.pop() 2656 v1 := state.pop() 2657 ret := c.lowerExtMul( 2658 v1, v2, 2659 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2660 vecOp == wasm.OpcodeVecI64x2ExtMulHighI32x4S, false) 2661 state.push(ret) 2662 2663 case wasm.OpcodeVecI32x4DotI16x8S: 2664 if state.unreachable { 2665 break 2666 } 2667 v2 := state.pop() 2668 v1 := state.pop() 2669 2670 ret := builder.AllocateInstruction().AsWideningPairwiseDotProductS(v1, v2).Insert(builder).Return() 2671 state.push(ret) 2672 2673 case wasm.OpcodeVecF32x4Eq, wasm.OpcodeVecF64x2Eq: 2674 if state.unreachable { 2675 break 2676 } 2677 var lane ssa.VecLane 2678 switch vecOp { 2679 case wasm.OpcodeVecF32x4Eq: 2680 lane = ssa.VecLaneF32x4 2681 case wasm.OpcodeVecF64x2Eq: 2682 lane = ssa.VecLaneF64x2 2683 } 2684 v2 := state.pop() 2685 v1 := state.pop() 2686 ret := builder.AllocateInstruction(). 2687 AsVFcmp(v1, v2, ssa.FloatCmpCondEqual, lane).Insert(builder).Return() 2688 state.push(ret) 2689 case wasm.OpcodeVecF32x4Ne, wasm.OpcodeVecF64x2Ne: 2690 if state.unreachable { 2691 break 2692 } 2693 var lane ssa.VecLane 2694 switch vecOp { 2695 case wasm.OpcodeVecF32x4Ne: 2696 lane = ssa.VecLaneF32x4 2697 case wasm.OpcodeVecF64x2Ne: 2698 lane = ssa.VecLaneF64x2 2699 } 2700 v2 := state.pop() 2701 v1 := state.pop() 2702 ret := builder.AllocateInstruction(). 2703 AsVFcmp(v1, v2, ssa.FloatCmpCondNotEqual, lane).Insert(builder).Return() 2704 state.push(ret) 2705 case wasm.OpcodeVecF32x4Lt, wasm.OpcodeVecF64x2Lt: 2706 if state.unreachable { 2707 break 2708 } 2709 var lane ssa.VecLane 2710 switch vecOp { 2711 case wasm.OpcodeVecF32x4Lt: 2712 lane = ssa.VecLaneF32x4 2713 case wasm.OpcodeVecF64x2Lt: 2714 lane = ssa.VecLaneF64x2 2715 } 2716 v2 := state.pop() 2717 v1 := state.pop() 2718 ret := builder.AllocateInstruction(). 2719 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThan, lane).Insert(builder).Return() 2720 state.push(ret) 2721 case wasm.OpcodeVecF32x4Le, wasm.OpcodeVecF64x2Le: 2722 if state.unreachable { 2723 break 2724 } 2725 var lane ssa.VecLane 2726 switch vecOp { 2727 case wasm.OpcodeVecF32x4Le: 2728 lane = ssa.VecLaneF32x4 2729 case wasm.OpcodeVecF64x2Le: 2730 lane = ssa.VecLaneF64x2 2731 } 2732 v2 := state.pop() 2733 v1 := state.pop() 2734 ret := builder.AllocateInstruction(). 2735 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThanOrEqual, lane).Insert(builder).Return() 2736 state.push(ret) 2737 case wasm.OpcodeVecF32x4Gt, wasm.OpcodeVecF64x2Gt: 2738 if state.unreachable { 2739 break 2740 } 2741 var lane ssa.VecLane 2742 switch vecOp { 2743 case wasm.OpcodeVecF32x4Gt: 2744 lane = ssa.VecLaneF32x4 2745 case wasm.OpcodeVecF64x2Gt: 2746 lane = ssa.VecLaneF64x2 2747 } 2748 v2 := state.pop() 2749 v1 := state.pop() 2750 ret := builder.AllocateInstruction(). 2751 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThan, lane).Insert(builder).Return() 2752 state.push(ret) 2753 case wasm.OpcodeVecF32x4Ge, wasm.OpcodeVecF64x2Ge: 2754 if state.unreachable { 2755 break 2756 } 2757 var lane ssa.VecLane 2758 switch vecOp { 2759 case wasm.OpcodeVecF32x4Ge: 2760 lane = ssa.VecLaneF32x4 2761 case wasm.OpcodeVecF64x2Ge: 2762 lane = ssa.VecLaneF64x2 2763 } 2764 v2 := state.pop() 2765 v1 := state.pop() 2766 ret := builder.AllocateInstruction(). 2767 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThanOrEqual, lane).Insert(builder).Return() 2768 state.push(ret) 2769 case wasm.OpcodeVecF32x4Ceil, wasm.OpcodeVecF64x2Ceil: 2770 if state.unreachable { 2771 break 2772 } 2773 var lane ssa.VecLane 2774 switch vecOp { 2775 case wasm.OpcodeVecF32x4Ceil: 2776 lane = ssa.VecLaneF32x4 2777 case wasm.OpcodeVecF64x2Ceil: 2778 lane = ssa.VecLaneF64x2 2779 } 2780 v1 := state.pop() 2781 ret := builder.AllocateInstruction().AsVCeil(v1, lane).Insert(builder).Return() 2782 state.push(ret) 2783 case wasm.OpcodeVecF32x4Floor, wasm.OpcodeVecF64x2Floor: 2784 if state.unreachable { 2785 break 2786 } 2787 var lane ssa.VecLane 2788 switch vecOp { 2789 case wasm.OpcodeVecF32x4Floor: 2790 lane = ssa.VecLaneF32x4 2791 case wasm.OpcodeVecF64x2Floor: 2792 lane = ssa.VecLaneF64x2 2793 } 2794 v1 := state.pop() 2795 ret := builder.AllocateInstruction().AsVFloor(v1, lane).Insert(builder).Return() 2796 state.push(ret) 2797 case wasm.OpcodeVecF32x4Trunc, wasm.OpcodeVecF64x2Trunc: 2798 if state.unreachable { 2799 break 2800 } 2801 var lane ssa.VecLane 2802 switch vecOp { 2803 case wasm.OpcodeVecF32x4Trunc: 2804 lane = ssa.VecLaneF32x4 2805 case wasm.OpcodeVecF64x2Trunc: 2806 lane = ssa.VecLaneF64x2 2807 } 2808 v1 := state.pop() 2809 ret := builder.AllocateInstruction().AsVTrunc(v1, lane).Insert(builder).Return() 2810 state.push(ret) 2811 case wasm.OpcodeVecF32x4Nearest, wasm.OpcodeVecF64x2Nearest: 2812 if state.unreachable { 2813 break 2814 } 2815 var lane ssa.VecLane 2816 switch vecOp { 2817 case wasm.OpcodeVecF32x4Nearest: 2818 lane = ssa.VecLaneF32x4 2819 case wasm.OpcodeVecF64x2Nearest: 2820 lane = ssa.VecLaneF64x2 2821 } 2822 v1 := state.pop() 2823 ret := builder.AllocateInstruction().AsVNearest(v1, lane).Insert(builder).Return() 2824 state.push(ret) 2825 case wasm.OpcodeVecF32x4Pmin, wasm.OpcodeVecF64x2Pmin: 2826 if state.unreachable { 2827 break 2828 } 2829 var lane ssa.VecLane 2830 switch vecOp { 2831 case wasm.OpcodeVecF32x4Pmin: 2832 lane = ssa.VecLaneF32x4 2833 case wasm.OpcodeVecF64x2Pmin: 2834 lane = ssa.VecLaneF64x2 2835 } 2836 v2 := state.pop() 2837 v1 := state.pop() 2838 ret := builder.AllocateInstruction().AsVMinPseudo(v1, v2, lane).Insert(builder).Return() 2839 state.push(ret) 2840 case wasm.OpcodeVecF32x4Pmax, wasm.OpcodeVecF64x2Pmax: 2841 if state.unreachable { 2842 break 2843 } 2844 var lane ssa.VecLane 2845 switch vecOp { 2846 case wasm.OpcodeVecF32x4Pmax: 2847 lane = ssa.VecLaneF32x4 2848 case wasm.OpcodeVecF64x2Pmax: 2849 lane = ssa.VecLaneF64x2 2850 } 2851 v2 := state.pop() 2852 v1 := state.pop() 2853 ret := builder.AllocateInstruction().AsVMaxPseudo(v1, v2, lane).Insert(builder).Return() 2854 state.push(ret) 2855 case wasm.OpcodeVecI32x4TruncSatF32x4S, wasm.OpcodeVecI32x4TruncSatF32x4U: 2856 if state.unreachable { 2857 break 2858 } 2859 v1 := state.pop() 2860 ret := builder.AllocateInstruction(). 2861 AsVFcvtToIntSat(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecI32x4TruncSatF32x4S).Insert(builder).Return() 2862 state.push(ret) 2863 case wasm.OpcodeVecI32x4TruncSatF64x2SZero, wasm.OpcodeVecI32x4TruncSatF64x2UZero: 2864 if state.unreachable { 2865 break 2866 } 2867 v1 := state.pop() 2868 ret := builder.AllocateInstruction(). 2869 AsVFcvtToIntSat(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecI32x4TruncSatF64x2SZero).Insert(builder).Return() 2870 state.push(ret) 2871 case wasm.OpcodeVecF32x4ConvertI32x4S, wasm.OpcodeVecF32x4ConvertI32x4U: 2872 if state.unreachable { 2873 break 2874 } 2875 v1 := state.pop() 2876 ret := builder.AllocateInstruction(). 2877 AsVFcvtFromInt(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecF32x4ConvertI32x4S).Insert(builder).Return() 2878 state.push(ret) 2879 case wasm.OpcodeVecF64x2ConvertLowI32x4S, wasm.OpcodeVecF64x2ConvertLowI32x4U: 2880 if state.unreachable { 2881 break 2882 } 2883 v1 := state.pop() 2884 if runtime.GOARCH == "arm64" { 2885 // TODO: this is weird. fix. 2886 v1 = builder.AllocateInstruction(). 2887 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S, true).Insert(builder).Return() 2888 } 2889 ret := builder.AllocateInstruction(). 2890 AsVFcvtFromInt(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S). 2891 Insert(builder).Return() 2892 state.push(ret) 2893 case wasm.OpcodeVecI8x16NarrowI16x8S, wasm.OpcodeVecI8x16NarrowI16x8U: 2894 if state.unreachable { 2895 break 2896 } 2897 v2 := state.pop() 2898 v1 := state.pop() 2899 ret := builder.AllocateInstruction(). 2900 AsNarrow(v1, v2, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI8x16NarrowI16x8S). 2901 Insert(builder).Return() 2902 state.push(ret) 2903 case wasm.OpcodeVecI16x8NarrowI32x4S, wasm.OpcodeVecI16x8NarrowI32x4U: 2904 if state.unreachable { 2905 break 2906 } 2907 v2 := state.pop() 2908 v1 := state.pop() 2909 ret := builder.AllocateInstruction(). 2910 AsNarrow(v1, v2, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI16x8NarrowI32x4S). 2911 Insert(builder).Return() 2912 state.push(ret) 2913 case wasm.OpcodeVecI16x8ExtendLowI8x16S, wasm.OpcodeVecI16x8ExtendLowI8x16U: 2914 if state.unreachable { 2915 break 2916 } 2917 v1 := state.pop() 2918 ret := builder.AllocateInstruction(). 2919 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendLowI8x16S, true). 2920 Insert(builder).Return() 2921 state.push(ret) 2922 case wasm.OpcodeVecI16x8ExtendHighI8x16S, wasm.OpcodeVecI16x8ExtendHighI8x16U: 2923 if state.unreachable { 2924 break 2925 } 2926 v1 := state.pop() 2927 ret := builder.AllocateInstruction(). 2928 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendHighI8x16S, false). 2929 Insert(builder).Return() 2930 state.push(ret) 2931 case wasm.OpcodeVecI32x4ExtendLowI16x8S, wasm.OpcodeVecI32x4ExtendLowI16x8U: 2932 if state.unreachable { 2933 break 2934 } 2935 v1 := state.pop() 2936 ret := builder.AllocateInstruction(). 2937 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendLowI16x8S, true). 2938 Insert(builder).Return() 2939 state.push(ret) 2940 case wasm.OpcodeVecI32x4ExtendHighI16x8S, wasm.OpcodeVecI32x4ExtendHighI16x8U: 2941 if state.unreachable { 2942 break 2943 } 2944 v1 := state.pop() 2945 ret := builder.AllocateInstruction(). 2946 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendHighI16x8S, false). 2947 Insert(builder).Return() 2948 state.push(ret) 2949 case wasm.OpcodeVecI64x2ExtendLowI32x4S, wasm.OpcodeVecI64x2ExtendLowI32x4U: 2950 if state.unreachable { 2951 break 2952 } 2953 v1 := state.pop() 2954 ret := builder.AllocateInstruction(). 2955 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendLowI32x4S, true). 2956 Insert(builder).Return() 2957 state.push(ret) 2958 case wasm.OpcodeVecI64x2ExtendHighI32x4S, wasm.OpcodeVecI64x2ExtendHighI32x4U: 2959 if state.unreachable { 2960 break 2961 } 2962 v1 := state.pop() 2963 ret := builder.AllocateInstruction(). 2964 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendHighI32x4S, false). 2965 Insert(builder).Return() 2966 state.push(ret) 2967 2968 case wasm.OpcodeVecF64x2PromoteLowF32x4Zero: 2969 if state.unreachable { 2970 break 2971 } 2972 v1 := state.pop() 2973 ret := builder.AllocateInstruction(). 2974 AsFvpromoteLow(v1, ssa.VecLaneF32x4). 2975 Insert(builder).Return() 2976 state.push(ret) 2977 case wasm.OpcodeVecF32x4DemoteF64x2Zero: 2978 if state.unreachable { 2979 break 2980 } 2981 v1 := state.pop() 2982 ret := builder.AllocateInstruction(). 2983 AsFvdemote(v1, ssa.VecLaneF64x2). 2984 Insert(builder).Return() 2985 state.push(ret) 2986 case wasm.OpcodeVecI8x16Shl, wasm.OpcodeVecI16x8Shl, wasm.OpcodeVecI32x4Shl, wasm.OpcodeVecI64x2Shl: 2987 if state.unreachable { 2988 break 2989 } 2990 var lane ssa.VecLane 2991 switch vecOp { 2992 case wasm.OpcodeVecI8x16Shl: 2993 lane = ssa.VecLaneI8x16 2994 case wasm.OpcodeVecI16x8Shl: 2995 lane = ssa.VecLaneI16x8 2996 case wasm.OpcodeVecI32x4Shl: 2997 lane = ssa.VecLaneI32x4 2998 case wasm.OpcodeVecI64x2Shl: 2999 lane = ssa.VecLaneI64x2 3000 } 3001 v2 := state.pop() 3002 v1 := state.pop() 3003 ret := builder.AllocateInstruction().AsVIshl(v1, v2, lane).Insert(builder).Return() 3004 state.push(ret) 3005 case wasm.OpcodeVecI8x16ShrS, wasm.OpcodeVecI16x8ShrS, wasm.OpcodeVecI32x4ShrS, wasm.OpcodeVecI64x2ShrS: 3006 if state.unreachable { 3007 break 3008 } 3009 var lane ssa.VecLane 3010 switch vecOp { 3011 case wasm.OpcodeVecI8x16ShrS: 3012 lane = ssa.VecLaneI8x16 3013 case wasm.OpcodeVecI16x8ShrS: 3014 lane = ssa.VecLaneI16x8 3015 case wasm.OpcodeVecI32x4ShrS: 3016 lane = ssa.VecLaneI32x4 3017 case wasm.OpcodeVecI64x2ShrS: 3018 lane = ssa.VecLaneI64x2 3019 } 3020 v2 := state.pop() 3021 v1 := state.pop() 3022 ret := builder.AllocateInstruction().AsVSshr(v1, v2, lane).Insert(builder).Return() 3023 state.push(ret) 3024 case wasm.OpcodeVecI8x16ShrU, wasm.OpcodeVecI16x8ShrU, wasm.OpcodeVecI32x4ShrU, wasm.OpcodeVecI64x2ShrU: 3025 if state.unreachable { 3026 break 3027 } 3028 var lane ssa.VecLane 3029 switch vecOp { 3030 case wasm.OpcodeVecI8x16ShrU: 3031 lane = ssa.VecLaneI8x16 3032 case wasm.OpcodeVecI16x8ShrU: 3033 lane = ssa.VecLaneI16x8 3034 case wasm.OpcodeVecI32x4ShrU: 3035 lane = ssa.VecLaneI32x4 3036 case wasm.OpcodeVecI64x2ShrU: 3037 lane = ssa.VecLaneI64x2 3038 } 3039 v2 := state.pop() 3040 v1 := state.pop() 3041 ret := builder.AllocateInstruction().AsVUshr(v1, v2, lane).Insert(builder).Return() 3042 state.push(ret) 3043 case wasm.OpcodeVecI8x16ExtractLaneS, wasm.OpcodeVecI16x8ExtractLaneS: 3044 state.pc++ 3045 if state.unreachable { 3046 break 3047 } 3048 var lane ssa.VecLane 3049 switch vecOp { 3050 case wasm.OpcodeVecI8x16ExtractLaneS: 3051 lane = ssa.VecLaneI8x16 3052 case wasm.OpcodeVecI16x8ExtractLaneS: 3053 lane = ssa.VecLaneI16x8 3054 } 3055 v1 := state.pop() 3056 index := c.wasmFunctionBody[state.pc] 3057 ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, true).Insert(builder).Return() 3058 state.push(ext) 3059 case wasm.OpcodeVecI8x16ExtractLaneU, wasm.OpcodeVecI16x8ExtractLaneU, 3060 wasm.OpcodeVecI32x4ExtractLane, wasm.OpcodeVecI64x2ExtractLane, 3061 wasm.OpcodeVecF32x4ExtractLane, wasm.OpcodeVecF64x2ExtractLane: 3062 state.pc++ // Skip the immediate value. 3063 if state.unreachable { 3064 break 3065 } 3066 var lane ssa.VecLane 3067 switch vecOp { 3068 case wasm.OpcodeVecI8x16ExtractLaneU: 3069 lane = ssa.VecLaneI8x16 3070 case wasm.OpcodeVecI16x8ExtractLaneU: 3071 lane = ssa.VecLaneI16x8 3072 case wasm.OpcodeVecI32x4ExtractLane: 3073 lane = ssa.VecLaneI32x4 3074 case wasm.OpcodeVecI64x2ExtractLane: 3075 lane = ssa.VecLaneI64x2 3076 case wasm.OpcodeVecF32x4ExtractLane: 3077 lane = ssa.VecLaneF32x4 3078 case wasm.OpcodeVecF64x2ExtractLane: 3079 lane = ssa.VecLaneF64x2 3080 } 3081 v1 := state.pop() 3082 index := c.wasmFunctionBody[state.pc] 3083 ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, false).Insert(builder).Return() 3084 state.push(ext) 3085 case wasm.OpcodeVecI8x16ReplaceLane, wasm.OpcodeVecI16x8ReplaceLane, 3086 wasm.OpcodeVecI32x4ReplaceLane, wasm.OpcodeVecI64x2ReplaceLane, 3087 wasm.OpcodeVecF32x4ReplaceLane, wasm.OpcodeVecF64x2ReplaceLane: 3088 state.pc++ 3089 if state.unreachable { 3090 break 3091 } 3092 var lane ssa.VecLane 3093 switch vecOp { 3094 case wasm.OpcodeVecI8x16ReplaceLane: 3095 lane = ssa.VecLaneI8x16 3096 case wasm.OpcodeVecI16x8ReplaceLane: 3097 lane = ssa.VecLaneI16x8 3098 case wasm.OpcodeVecI32x4ReplaceLane: 3099 lane = ssa.VecLaneI32x4 3100 case wasm.OpcodeVecI64x2ReplaceLane: 3101 lane = ssa.VecLaneI64x2 3102 case wasm.OpcodeVecF32x4ReplaceLane: 3103 lane = ssa.VecLaneF32x4 3104 case wasm.OpcodeVecF64x2ReplaceLane: 3105 lane = ssa.VecLaneF64x2 3106 } 3107 v2 := state.pop() 3108 v1 := state.pop() 3109 index := c.wasmFunctionBody[state.pc] 3110 ret := builder.AllocateInstruction().AsInsertlane(v1, v2, index, lane).Insert(builder).Return() 3111 state.push(ret) 3112 case wasm.OpcodeVecV128i8x16Shuffle: 3113 state.pc++ 3114 laneIndexes := c.wasmFunctionBody[state.pc : state.pc+16] 3115 state.pc += 15 3116 if state.unreachable { 3117 break 3118 } 3119 v2 := state.pop() 3120 v1 := state.pop() 3121 ret := builder.AllocateInstruction().AsShuffle(v1, v2, laneIndexes).Insert(builder).Return() 3122 state.push(ret) 3123 3124 case wasm.OpcodeVecI8x16Swizzle: 3125 if state.unreachable { 3126 break 3127 } 3128 v2 := state.pop() 3129 v1 := state.pop() 3130 ret := builder.AllocateInstruction().AsSwizzle(v1, v2, ssa.VecLaneI8x16).Insert(builder).Return() 3131 state.push(ret) 3132 3133 case wasm.OpcodeVecI8x16Splat, 3134 wasm.OpcodeVecI16x8Splat, 3135 wasm.OpcodeVecI32x4Splat, 3136 wasm.OpcodeVecI64x2Splat, 3137 wasm.OpcodeVecF32x4Splat, 3138 wasm.OpcodeVecF64x2Splat: 3139 if state.unreachable { 3140 break 3141 } 3142 var lane ssa.VecLane 3143 switch vecOp { 3144 case wasm.OpcodeVecI8x16Splat: 3145 lane = ssa.VecLaneI8x16 3146 case wasm.OpcodeVecI16x8Splat: 3147 lane = ssa.VecLaneI16x8 3148 case wasm.OpcodeVecI32x4Splat: 3149 lane = ssa.VecLaneI32x4 3150 case wasm.OpcodeVecI64x2Splat: 3151 lane = ssa.VecLaneI64x2 3152 case wasm.OpcodeVecF32x4Splat: 3153 lane = ssa.VecLaneF32x4 3154 case wasm.OpcodeVecF64x2Splat: 3155 lane = ssa.VecLaneF64x2 3156 } 3157 v1 := state.pop() 3158 ret := builder.AllocateInstruction().AsSplat(v1, lane).Insert(builder).Return() 3159 state.push(ret) 3160 3161 default: 3162 panic("TODO: unsupported vector instruction: " + wasm.VectorInstructionName(vecOp)) 3163 } 3164 case wasm.OpcodeAtomicPrefix: 3165 state.pc++ 3166 atomicOp := c.wasmFunctionBody[state.pc] 3167 switch atomicOp { 3168 case wasm.OpcodeAtomicMemoryWait32, wasm.OpcodeAtomicMemoryWait64: 3169 _, offset := c.readMemArg() 3170 if state.unreachable { 3171 break 3172 } 3173 3174 c.storeCallerModuleContext() 3175 3176 var opSize uint64 3177 var trampoline wazevoapi.Offset 3178 var sig *ssa.Signature 3179 switch atomicOp { 3180 case wasm.OpcodeAtomicMemoryWait32: 3181 opSize = 4 3182 trampoline = wazevoapi.ExecutionContextOffsetMemoryWait32TrampolineAddress 3183 sig = &c.memoryWait32Sig 3184 case wasm.OpcodeAtomicMemoryWait64: 3185 opSize = 8 3186 trampoline = wazevoapi.ExecutionContextOffsetMemoryWait64TrampolineAddress 3187 sig = &c.memoryWait64Sig 3188 } 3189 3190 timeout := state.pop() 3191 exp := state.pop() 3192 baseAddr := state.pop() 3193 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), opSize) 3194 3195 memoryWaitPtr := builder.AllocateInstruction(). 3196 AsLoad(c.execCtxPtrValue, 3197 trampoline.U32(), 3198 ssa.TypeI64, 3199 ).Insert(builder).Return() 3200 3201 args := c.allocateVarLengthValues(3, c.execCtxPtrValue, timeout, exp, addr) 3202 memoryWaitRet := builder.AllocateInstruction(). 3203 AsCallIndirect(memoryWaitPtr, sig, args). 3204 Insert(builder).Return() 3205 state.push(memoryWaitRet) 3206 case wasm.OpcodeAtomicMemoryNotify: 3207 _, offset := c.readMemArg() 3208 if state.unreachable { 3209 break 3210 } 3211 3212 c.storeCallerModuleContext() 3213 count := state.pop() 3214 baseAddr := state.pop() 3215 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), 4) 3216 3217 memoryNotifyPtr := builder.AllocateInstruction(). 3218 AsLoad(c.execCtxPtrValue, 3219 wazevoapi.ExecutionContextOffsetMemoryNotifyTrampolineAddress.U32(), 3220 ssa.TypeI64, 3221 ).Insert(builder).Return() 3222 args := c.allocateVarLengthValues(2, c.execCtxPtrValue, count, addr) 3223 memoryNotifyRet := builder.AllocateInstruction(). 3224 AsCallIndirect(memoryNotifyPtr, &c.memoryNotifySig, args). 3225 Insert(builder).Return() 3226 state.push(memoryNotifyRet) 3227 case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI64Load, wasm.OpcodeAtomicI32Load8U, wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI64Load8U, wasm.OpcodeAtomicI64Load16U, wasm.OpcodeAtomicI64Load32U: 3228 _, offset := c.readMemArg() 3229 if state.unreachable { 3230 break 3231 } 3232 3233 baseAddr := state.pop() 3234 3235 var size uint64 3236 switch atomicOp { 3237 case wasm.OpcodeAtomicI64Load: 3238 size = 8 3239 case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI64Load32U: 3240 size = 4 3241 case wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI64Load16U: 3242 size = 2 3243 case wasm.OpcodeAtomicI32Load8U, wasm.OpcodeAtomicI64Load8U: 3244 size = 1 3245 } 3246 3247 var typ ssa.Type 3248 switch atomicOp { 3249 case wasm.OpcodeAtomicI64Load, wasm.OpcodeAtomicI64Load32U, wasm.OpcodeAtomicI64Load16U, wasm.OpcodeAtomicI64Load8U: 3250 typ = ssa.TypeI64 3251 case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI32Load8U: 3252 typ = ssa.TypeI32 3253 } 3254 3255 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size) 3256 res := builder.AllocateInstruction().AsAtomicLoad(addr, size, typ).Insert(builder).Return() 3257 state.push(res) 3258 case wasm.OpcodeAtomicI32Store, wasm.OpcodeAtomicI64Store, wasm.OpcodeAtomicI32Store8, wasm.OpcodeAtomicI32Store16, wasm.OpcodeAtomicI64Store8, wasm.OpcodeAtomicI64Store16, wasm.OpcodeAtomicI64Store32: 3259 _, offset := c.readMemArg() 3260 if state.unreachable { 3261 break 3262 } 3263 3264 val := state.pop() 3265 baseAddr := state.pop() 3266 3267 var size uint64 3268 switch atomicOp { 3269 case wasm.OpcodeAtomicI64Store: 3270 size = 8 3271 case wasm.OpcodeAtomicI32Store, wasm.OpcodeAtomicI64Store32: 3272 size = 4 3273 case wasm.OpcodeAtomicI32Store16, wasm.OpcodeAtomicI64Store16: 3274 size = 2 3275 case wasm.OpcodeAtomicI32Store8, wasm.OpcodeAtomicI64Store8: 3276 size = 1 3277 } 3278 3279 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size) 3280 builder.AllocateInstruction().AsAtomicStore(addr, val, size).Insert(builder) 3281 case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64RmwAdd, wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw8AddU, wasm.OpcodeAtomicI64Rmw16AddU, wasm.OpcodeAtomicI64Rmw32AddU, 3282 wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64RmwSub, wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw8SubU, wasm.OpcodeAtomicI64Rmw16SubU, wasm.OpcodeAtomicI64Rmw32SubU, 3283 wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64RmwAnd, wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw8AndU, wasm.OpcodeAtomicI64Rmw16AndU, wasm.OpcodeAtomicI64Rmw32AndU, 3284 wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64RmwOr, wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw8OrU, wasm.OpcodeAtomicI64Rmw16OrU, wasm.OpcodeAtomicI64Rmw32OrU, 3285 wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64RmwXor, wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw8XorU, wasm.OpcodeAtomicI64Rmw16XorU, wasm.OpcodeAtomicI64Rmw32XorU, 3286 wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64RmwXchg, wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw8XchgU, wasm.OpcodeAtomicI64Rmw16XchgU, wasm.OpcodeAtomicI64Rmw32XchgU: 3287 _, offset := c.readMemArg() 3288 if state.unreachable { 3289 break 3290 } 3291 3292 val := state.pop() 3293 baseAddr := state.pop() 3294 3295 var rmwOp ssa.AtomicRmwOp 3296 var size uint64 3297 switch atomicOp { 3298 case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64RmwAdd, wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw8AddU, wasm.OpcodeAtomicI64Rmw16AddU, wasm.OpcodeAtomicI64Rmw32AddU: 3299 rmwOp = ssa.AtomicRmwOpAdd 3300 switch atomicOp { 3301 case wasm.OpcodeAtomicI64RmwAdd: 3302 size = 8 3303 case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64Rmw32AddU: 3304 size = 4 3305 case wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw16AddU: 3306 size = 2 3307 case wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI64Rmw8AddU: 3308 size = 1 3309 } 3310 case wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64RmwSub, wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw8SubU, wasm.OpcodeAtomicI64Rmw16SubU, wasm.OpcodeAtomicI64Rmw32SubU: 3311 rmwOp = ssa.AtomicRmwOpSub 3312 switch atomicOp { 3313 case wasm.OpcodeAtomicI64RmwSub: 3314 size = 8 3315 case wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64Rmw32SubU: 3316 size = 4 3317 case wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw16SubU: 3318 size = 2 3319 case wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI64Rmw8SubU: 3320 size = 1 3321 } 3322 case wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64RmwAnd, wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw8AndU, wasm.OpcodeAtomicI64Rmw16AndU, wasm.OpcodeAtomicI64Rmw32AndU: 3323 rmwOp = ssa.AtomicRmwOpAnd 3324 switch atomicOp { 3325 case wasm.OpcodeAtomicI64RmwAnd: 3326 size = 8 3327 case wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64Rmw32AndU: 3328 size = 4 3329 case wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw16AndU: 3330 size = 2 3331 case wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI64Rmw8AndU: 3332 size = 1 3333 } 3334 case wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64RmwOr, wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw8OrU, wasm.OpcodeAtomicI64Rmw16OrU, wasm.OpcodeAtomicI64Rmw32OrU: 3335 rmwOp = ssa.AtomicRmwOpOr 3336 switch atomicOp { 3337 case wasm.OpcodeAtomicI64RmwOr: 3338 size = 8 3339 case wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64Rmw32OrU: 3340 size = 4 3341 case wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw16OrU: 3342 size = 2 3343 case wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI64Rmw8OrU: 3344 size = 1 3345 } 3346 case wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64RmwXor, wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw8XorU, wasm.OpcodeAtomicI64Rmw16XorU, wasm.OpcodeAtomicI64Rmw32XorU: 3347 rmwOp = ssa.AtomicRmwOpXor 3348 switch atomicOp { 3349 case wasm.OpcodeAtomicI64RmwXor: 3350 size = 8 3351 case wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64Rmw32XorU: 3352 size = 4 3353 case wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw16XorU: 3354 size = 2 3355 case wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI64Rmw8XorU: 3356 size = 1 3357 } 3358 case wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64RmwXchg, wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw8XchgU, wasm.OpcodeAtomicI64Rmw16XchgU, wasm.OpcodeAtomicI64Rmw32XchgU: 3359 rmwOp = ssa.AtomicRmwOpXchg 3360 switch atomicOp { 3361 case wasm.OpcodeAtomicI64RmwXchg: 3362 size = 8 3363 case wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64Rmw32XchgU: 3364 size = 4 3365 case wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw16XchgU: 3366 size = 2 3367 case wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI64Rmw8XchgU: 3368 size = 1 3369 } 3370 } 3371 3372 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size) 3373 res := builder.AllocateInstruction().AsAtomicRmw(rmwOp, addr, val, size).Insert(builder).Return() 3374 state.push(res) 3375 case wasm.OpcodeAtomicI32RmwCmpxchg, wasm.OpcodeAtomicI64RmwCmpxchg, wasm.OpcodeAtomicI32Rmw8CmpxchgU, wasm.OpcodeAtomicI32Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw8CmpxchgU, wasm.OpcodeAtomicI64Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw32CmpxchgU: 3376 _, offset := c.readMemArg() 3377 if state.unreachable { 3378 break 3379 } 3380 3381 repl := state.pop() 3382 exp := state.pop() 3383 baseAddr := state.pop() 3384 3385 var size uint64 3386 switch atomicOp { 3387 case wasm.OpcodeAtomicI64RmwCmpxchg: 3388 size = 8 3389 case wasm.OpcodeAtomicI32RmwCmpxchg, wasm.OpcodeAtomicI64Rmw32CmpxchgU: 3390 size = 4 3391 case wasm.OpcodeAtomicI32Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw16CmpxchgU: 3392 size = 2 3393 case wasm.OpcodeAtomicI32Rmw8CmpxchgU, wasm.OpcodeAtomicI64Rmw8CmpxchgU: 3394 size = 1 3395 } 3396 addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size) 3397 res := builder.AllocateInstruction().AsAtomicCas(addr, exp, repl, size).Insert(builder).Return() 3398 state.push(res) 3399 case wasm.OpcodeAtomicFence: 3400 order := c.readByte() 3401 if state.unreachable { 3402 break 3403 } 3404 if c.needMemory { 3405 builder.AllocateInstruction().AsFence(order).Insert(builder) 3406 } 3407 default: 3408 panic("TODO: unsupported atomic instruction: " + wasm.AtomicInstructionName(atomicOp)) 3409 } 3410 case wasm.OpcodeRefFunc: 3411 funcIndex := c.readI32u() 3412 if state.unreachable { 3413 break 3414 } 3415 3416 c.storeCallerModuleContext() 3417 3418 funcIndexVal := builder.AllocateInstruction().AsIconst32(funcIndex).Insert(builder).Return() 3419 3420 refFuncPtr := builder.AllocateInstruction(). 3421 AsLoad(c.execCtxPtrValue, 3422 wazevoapi.ExecutionContextOffsetRefFuncTrampolineAddress.U32(), 3423 ssa.TypeI64, 3424 ).Insert(builder).Return() 3425 3426 args := c.allocateVarLengthValues(2, c.execCtxPtrValue, funcIndexVal) 3427 refFuncRet := builder. 3428 AllocateInstruction(). 3429 AsCallIndirect(refFuncPtr, &c.refFuncSig, args). 3430 Insert(builder).Return() 3431 state.push(refFuncRet) 3432 3433 case wasm.OpcodeRefNull: 3434 c.loweringState.pc++ // skips the reference type as we treat both of them as i64(0). 3435 if state.unreachable { 3436 break 3437 } 3438 ret := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 3439 state.push(ret) 3440 case wasm.OpcodeRefIsNull: 3441 if state.unreachable { 3442 break 3443 } 3444 r := state.pop() 3445 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder) 3446 icmp := builder.AllocateInstruction(). 3447 AsIcmp(r, zero.Return(), ssa.IntegerCmpCondEqual). 3448 Insert(builder). 3449 Return() 3450 state.push(icmp) 3451 case wasm.OpcodeTableSet: 3452 tableIndex := c.readI32u() 3453 if state.unreachable { 3454 break 3455 } 3456 r := state.pop() 3457 targetOffsetInTable := state.pop() 3458 3459 elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable) 3460 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, r, elementAddr, 0).Insert(builder) 3461 3462 case wasm.OpcodeTableGet: 3463 tableIndex := c.readI32u() 3464 if state.unreachable { 3465 break 3466 } 3467 targetOffsetInTable := state.pop() 3468 elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable) 3469 loaded := builder.AllocateInstruction().AsLoad(elementAddr, 0, ssa.TypeI64).Insert(builder).Return() 3470 state.push(loaded) 3471 default: 3472 panic("TODO: unsupported in wazevo yet: " + wasm.InstructionName(op)) 3473 } 3474 3475 if wazevoapi.FrontEndLoggingEnabled { 3476 fmt.Println("--------- Translated " + wasm.InstructionName(op) + " --------") 3477 fmt.Println("state: " + c.loweringState.String()) 3478 fmt.Println(c.formatBuilder()) 3479 fmt.Println("--------------------------") 3480 } 3481 c.loweringState.pc++ 3482 } 3483 3484 func (c *Compiler) lowerExtMul(v1, v2 ssa.Value, from, to ssa.VecLane, signed, low bool) ssa.Value { 3485 // TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs. 3486 builder := c.ssaBuilder 3487 3488 v1lo := builder.AllocateInstruction().AsWiden(v1, from, signed, low).Insert(builder).Return() 3489 v2lo := builder.AllocateInstruction().AsWiden(v2, from, signed, low).Insert(builder).Return() 3490 3491 return builder.AllocateInstruction().AsVImul(v1lo, v2lo, to).Insert(builder).Return() 3492 } 3493 3494 const ( 3495 tableInstanceBaseAddressOffset = 0 3496 tableInstanceLenOffset = tableInstanceBaseAddressOffset + 8 3497 ) 3498 3499 func (c *Compiler) lowerAccessTableWithBoundsCheck(tableIndex uint32, elementOffsetInTable ssa.Value) (elementAddress ssa.Value) { 3500 builder := c.ssaBuilder 3501 3502 // Load the table. 3503 loadTableInstancePtr := builder.AllocateInstruction() 3504 loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64) 3505 builder.InsertInstruction(loadTableInstancePtr) 3506 tableInstancePtr := loadTableInstancePtr.Return() 3507 3508 // Load the table's length. 3509 loadTableLen := builder.AllocateInstruction() 3510 loadTableLen.AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32) 3511 builder.InsertInstruction(loadTableLen) 3512 tableLen := loadTableLen.Return() 3513 3514 // Compare the length and the target, and trap if out of bounds. 3515 checkOOB := builder.AllocateInstruction() 3516 checkOOB.AsIcmp(elementOffsetInTable, tableLen, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual) 3517 builder.InsertInstruction(checkOOB) 3518 exitIfOOB := builder.AllocateInstruction() 3519 exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds) 3520 builder.InsertInstruction(exitIfOOB) 3521 3522 // Get the base address of wasm.TableInstance.References. 3523 loadTableBaseAddress := builder.AllocateInstruction() 3524 loadTableBaseAddress.AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64) 3525 builder.InsertInstruction(loadTableBaseAddress) 3526 tableBase := loadTableBaseAddress.Return() 3527 3528 // Calculate the address of the target function. First we need to multiply targetOffsetInTable by 8 (pointer size). 3529 multiplyBy8 := builder.AllocateInstruction() 3530 three := builder.AllocateInstruction() 3531 three.AsIconst64(3) 3532 builder.InsertInstruction(three) 3533 multiplyBy8.AsIshl(elementOffsetInTable, three.Return()) 3534 builder.InsertInstruction(multiplyBy8) 3535 targetOffsetInTableMultipliedBy8 := multiplyBy8.Return() 3536 3537 // Then add the multiplied value to the base which results in the address of the target function (*wazevo.functionInstance) 3538 calcElementAddressInTable := builder.AllocateInstruction() 3539 calcElementAddressInTable.AsIadd(tableBase, targetOffsetInTableMultipliedBy8) 3540 builder.InsertInstruction(calcElementAddressInTable) 3541 return calcElementAddressInTable.Return() 3542 } 3543 3544 func (c *Compiler) lowerCallIndirect(typeIndex, tableIndex uint32) { 3545 builder := c.ssaBuilder 3546 state := c.state() 3547 3548 elementOffsetInTable := state.pop() 3549 functionInstancePtrAddress := c.lowerAccessTableWithBoundsCheck(tableIndex, elementOffsetInTable) 3550 loadFunctionInstancePtr := builder.AllocateInstruction() 3551 loadFunctionInstancePtr.AsLoad(functionInstancePtrAddress, 0, ssa.TypeI64) 3552 builder.InsertInstruction(loadFunctionInstancePtr) 3553 functionInstancePtr := loadFunctionInstancePtr.Return() 3554 3555 // Check if it is not the null pointer. 3556 zero := builder.AllocateInstruction() 3557 zero.AsIconst64(0) 3558 builder.InsertInstruction(zero) 3559 checkNull := builder.AllocateInstruction() 3560 checkNull.AsIcmp(functionInstancePtr, zero.Return(), ssa.IntegerCmpCondEqual) 3561 builder.InsertInstruction(checkNull) 3562 exitIfNull := builder.AllocateInstruction() 3563 exitIfNull.AsExitIfTrueWithCode(c.execCtxPtrValue, checkNull.Return(), wazevoapi.ExitCodeIndirectCallNullPointer) 3564 builder.InsertInstruction(exitIfNull) 3565 3566 // We need to do the type check. First, load the target function instance's typeID. 3567 loadTypeID := builder.AllocateInstruction() 3568 loadTypeID.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceTypeIDOffset, ssa.TypeI32) 3569 builder.InsertInstruction(loadTypeID) 3570 actualTypeID := loadTypeID.Return() 3571 3572 // Next, we load the expected TypeID: 3573 loadTypeIDsBegin := builder.AllocateInstruction() 3574 loadTypeIDsBegin.AsLoad(c.moduleCtxPtrValue, c.offset.TypeIDs1stElement.U32(), ssa.TypeI64) 3575 builder.InsertInstruction(loadTypeIDsBegin) 3576 typeIDsBegin := loadTypeIDsBegin.Return() 3577 3578 loadExpectedTypeID := builder.AllocateInstruction() 3579 loadExpectedTypeID.AsLoad(typeIDsBegin, uint32(typeIndex)*4 /* size of wasm.FunctionTypeID */, ssa.TypeI32) 3580 builder.InsertInstruction(loadExpectedTypeID) 3581 expectedTypeID := loadExpectedTypeID.Return() 3582 3583 // Check if the type ID matches. 3584 checkTypeID := builder.AllocateInstruction() 3585 checkTypeID.AsIcmp(actualTypeID, expectedTypeID, ssa.IntegerCmpCondNotEqual) 3586 builder.InsertInstruction(checkTypeID) 3587 exitIfNotMatch := builder.AllocateInstruction() 3588 exitIfNotMatch.AsExitIfTrueWithCode(c.execCtxPtrValue, checkTypeID.Return(), wazevoapi.ExitCodeIndirectCallTypeMismatch) 3589 builder.InsertInstruction(exitIfNotMatch) 3590 3591 // Now ready to call the function. Load the executable and moduleContextOpaquePtr from the function instance. 3592 loadExecutablePtr := builder.AllocateInstruction() 3593 loadExecutablePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceExecutableOffset, ssa.TypeI64) 3594 builder.InsertInstruction(loadExecutablePtr) 3595 executablePtr := loadExecutablePtr.Return() 3596 loadModuleContextOpaquePtr := builder.AllocateInstruction() 3597 loadModuleContextOpaquePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceModuleContextOpaquePtrOffset, ssa.TypeI64) 3598 builder.InsertInstruction(loadModuleContextOpaquePtr) 3599 moduleContextOpaquePtr := loadModuleContextOpaquePtr.Return() 3600 3601 typ := &c.m.TypeSection[typeIndex] 3602 tail := len(state.values) - len(typ.Params) 3603 vs := state.values[tail:] 3604 state.values = state.values[:tail] 3605 args := c.allocateVarLengthValues(2+len(vs), c.execCtxPtrValue, moduleContextOpaquePtr) 3606 args = args.Append(builder.VarLengthPool(), vs...) 3607 3608 // Before transfer the control to the callee, we have to store the current module's moduleContextPtr 3609 // into execContext.callerModuleContextPtr in case when the callee is a Go function. 3610 c.storeCallerModuleContext() 3611 3612 call := builder.AllocateInstruction() 3613 call.AsCallIndirect(executablePtr, c.signatures[typ], args) 3614 builder.InsertInstruction(call) 3615 3616 first, rest := call.Returns() 3617 if first.Valid() { 3618 state.push(first) 3619 } 3620 for _, v := range rest { 3621 state.push(v) 3622 } 3623 3624 c.reloadAfterCall() 3625 } 3626 3627 // memOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores). 3628 func (c *Compiler) memOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) { 3629 address = ssa.ValueInvalid 3630 builder := c.ssaBuilder 3631 3632 baseAddrID := baseAddr.ID() 3633 ceil := constOffset + operationSizeInBytes 3634 if known := c.getKnownSafeBound(baseAddrID); known.valid() { 3635 // We reuse the calculated absolute address even if the bound is not known to be safe. 3636 address = known.absoluteAddr 3637 if ceil <= known.bound { 3638 if !address.Valid() { 3639 // This means that, the bound is known to be safe, but the memory base might have changed. 3640 // So, we re-calculate the address. 3641 memBase := c.getMemoryBaseValue(false) 3642 extBaseAddr := builder.AllocateInstruction(). 3643 AsUExtend(baseAddr, 32, 64). 3644 Insert(builder). 3645 Return() 3646 address = builder.AllocateInstruction(). 3647 AsIadd(memBase, extBaseAddr).Insert(builder).Return() 3648 known.absoluteAddr = address // Update the absolute address for the subsequent memory access. 3649 } 3650 return 3651 } 3652 } 3653 3654 ceilConst := builder.AllocateInstruction() 3655 ceilConst.AsIconst64(ceil) 3656 builder.InsertInstruction(ceilConst) 3657 3658 // We calculate the offset in 64-bit space. 3659 extBaseAddr := builder.AllocateInstruction(). 3660 AsUExtend(baseAddr, 32, 64). 3661 Insert(builder). 3662 Return() 3663 3664 // Note: memLen is already zero extended to 64-bit space at the load time. 3665 memLen := c.getMemoryLenValue(false) 3666 3667 // baseAddrPlusCeil = baseAddr + ceil 3668 baseAddrPlusCeil := builder.AllocateInstruction() 3669 baseAddrPlusCeil.AsIadd(extBaseAddr, ceilConst.Return()) 3670 builder.InsertInstruction(baseAddrPlusCeil) 3671 3672 // Check for out of bounds memory access: `memLen >= baseAddrPlusCeil`. 3673 cmp := builder.AllocateInstruction() 3674 cmp.AsIcmp(memLen, baseAddrPlusCeil.Return(), ssa.IntegerCmpCondUnsignedLessThan) 3675 builder.InsertInstruction(cmp) 3676 exitIfNZ := builder.AllocateInstruction() 3677 exitIfNZ.AsExitIfTrueWithCode(c.execCtxPtrValue, cmp.Return(), wazevoapi.ExitCodeMemoryOutOfBounds) 3678 builder.InsertInstruction(exitIfNZ) 3679 3680 // Load the value from memBase + extBaseAddr. 3681 if address == ssa.ValueInvalid { // Reuse the value if the memBase is already calculated at this point. 3682 memBase := c.getMemoryBaseValue(false) 3683 address = builder.AllocateInstruction(). 3684 AsIadd(memBase, extBaseAddr).Insert(builder).Return() 3685 } 3686 3687 // Record the bound ceil for this baseAddr is known to be safe for the subsequent memory access in the same block. 3688 c.recordKnownSafeBound(baseAddrID, ceil, address) 3689 return 3690 } 3691 3692 // atomicMemOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores), including 3693 // the constant offset and performs an alignment check on the final address. 3694 func (c *Compiler) atomicMemOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) { 3695 builder := c.ssaBuilder 3696 3697 addrWithoutOffset := c.memOpSetup(baseAddr, constOffset, operationSizeInBytes) 3698 var addr ssa.Value 3699 if constOffset == 0 { 3700 addr = addrWithoutOffset 3701 } else { 3702 offset := builder.AllocateInstruction().AsIconst64(constOffset).Insert(builder).Return() 3703 addr = builder.AllocateInstruction().AsIadd(addrWithoutOffset, offset).Insert(builder).Return() 3704 } 3705 3706 c.memAlignmentCheck(addr, operationSizeInBytes) 3707 3708 return addr 3709 } 3710 3711 func (c *Compiler) memAlignmentCheck(addr ssa.Value, operationSizeInBytes uint64) { 3712 if operationSizeInBytes == 1 { 3713 return // No alignment restrictions when accessing a byte 3714 } 3715 var checkBits uint64 3716 switch operationSizeInBytes { 3717 case 2: 3718 checkBits = 0b1 3719 case 4: 3720 checkBits = 0b11 3721 case 8: 3722 checkBits = 0b111 3723 } 3724 3725 builder := c.ssaBuilder 3726 3727 mask := builder.AllocateInstruction().AsIconst64(checkBits).Insert(builder).Return() 3728 masked := builder.AllocateInstruction().AsBand(addr, mask).Insert(builder).Return() 3729 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 3730 cmp := builder.AllocateInstruction().AsIcmp(masked, zero, ssa.IntegerCmpCondNotEqual).Insert(builder).Return() 3731 builder.AllocateInstruction().AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeUnalignedAtomic).Insert(builder) 3732 } 3733 3734 func (c *Compiler) callMemmove(dst, src, size ssa.Value) { 3735 args := c.allocateVarLengthValues(3, dst, src, size) 3736 if size.Type() != ssa.TypeI64 { 3737 panic("TODO: memmove size must be i64") 3738 } 3739 3740 builder := c.ssaBuilder 3741 memmovePtr := builder.AllocateInstruction(). 3742 AsLoad(c.execCtxPtrValue, 3743 wazevoapi.ExecutionContextOffsetMemmoveAddress.U32(), 3744 ssa.TypeI64, 3745 ).Insert(builder).Return() 3746 builder.AllocateInstruction().AsCallGoRuntimeMemmove(memmovePtr, &c.memmoveSig, args).Insert(builder) 3747 } 3748 3749 func (c *Compiler) reloadAfterCall() { 3750 // Note that when these are not used in the following instructions, they will be optimized out. 3751 // So in any ways, we define them! 3752 3753 // After calling any function, memory buffer might have changed. So we need to re-define the variable. 3754 // However, if the memory is shared, we don't need to reload the memory base and length as the base will never change. 3755 if c.needMemory && !c.memoryShared { 3756 c.reloadMemoryBaseLen() 3757 } 3758 3759 // Also, any mutable Global can change. 3760 for _, index := range c.mutableGlobalVariablesIndexes { 3761 _ = c.getWasmGlobalValue(index, true) 3762 } 3763 } 3764 3765 func (c *Compiler) reloadMemoryBaseLen() { 3766 _ = c.getMemoryBaseValue(true) 3767 _ = c.getMemoryLenValue(true) 3768 3769 // This function being called means that the memory base might have changed. 3770 // Therefore, we need to clear the absolute addresses recorded in the known safe bounds 3771 // because we cache the absolute address of the memory access per each base offset. 3772 c.resetAbsoluteAddressInSafeBounds() 3773 } 3774 3775 func (c *Compiler) setWasmGlobalValue(index wasm.Index, v ssa.Value) { 3776 variable := c.globalVariables[index] 3777 opaqueOffset := c.offset.GlobalInstanceOffset(index) 3778 3779 builder := c.ssaBuilder 3780 if index < c.m.ImportGlobalCount { 3781 loadGlobalInstPtr := builder.AllocateInstruction() 3782 loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64) 3783 builder.InsertInstruction(loadGlobalInstPtr) 3784 3785 store := builder.AllocateInstruction() 3786 store.AsStore(ssa.OpcodeStore, v, loadGlobalInstPtr.Return(), uint32(0)) 3787 builder.InsertInstruction(store) 3788 3789 } else { 3790 store := builder.AllocateInstruction() 3791 store.AsStore(ssa.OpcodeStore, v, c.moduleCtxPtrValue, uint32(opaqueOffset)) 3792 builder.InsertInstruction(store) 3793 } 3794 3795 // The value has changed to `v`, so we record it. 3796 builder.DefineVariableInCurrentBB(variable, v) 3797 } 3798 3799 func (c *Compiler) getWasmGlobalValue(index wasm.Index, forceLoad bool) ssa.Value { 3800 variable := c.globalVariables[index] 3801 typ := c.globalVariablesTypes[index] 3802 opaqueOffset := c.offset.GlobalInstanceOffset(index) 3803 3804 builder := c.ssaBuilder 3805 if !forceLoad { 3806 if v := builder.FindValueInLinearPath(variable); v.Valid() { 3807 return v 3808 } 3809 } 3810 3811 var load *ssa.Instruction 3812 if index < c.m.ImportGlobalCount { 3813 loadGlobalInstPtr := builder.AllocateInstruction() 3814 loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64) 3815 builder.InsertInstruction(loadGlobalInstPtr) 3816 load = builder.AllocateInstruction(). 3817 AsLoad(loadGlobalInstPtr.Return(), uint32(0), typ) 3818 } else { 3819 load = builder.AllocateInstruction(). 3820 AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), typ) 3821 } 3822 3823 v := load.Insert(builder).Return() 3824 builder.DefineVariableInCurrentBB(variable, v) 3825 return v 3826 } 3827 3828 const ( 3829 memoryInstanceBufOffset = 0 3830 memoryInstanceBufSizeOffset = memoryInstanceBufOffset + 8 3831 ) 3832 3833 func (c *Compiler) getMemoryBaseValue(forceReload bool) ssa.Value { 3834 builder := c.ssaBuilder 3835 variable := c.memoryBaseVariable 3836 if !forceReload { 3837 if v := builder.FindValueInLinearPath(variable); v.Valid() { 3838 return v 3839 } 3840 } 3841 3842 var ret ssa.Value 3843 if c.offset.LocalMemoryBegin < 0 { 3844 loadMemInstPtr := builder.AllocateInstruction() 3845 loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64) 3846 builder.InsertInstruction(loadMemInstPtr) 3847 memInstPtr := loadMemInstPtr.Return() 3848 3849 loadBufPtr := builder.AllocateInstruction() 3850 loadBufPtr.AsLoad(memInstPtr, memoryInstanceBufOffset, ssa.TypeI64) 3851 builder.InsertInstruction(loadBufPtr) 3852 ret = loadBufPtr.Return() 3853 } else { 3854 load := builder.AllocateInstruction() 3855 load.AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryBase().U32(), ssa.TypeI64) 3856 builder.InsertInstruction(load) 3857 ret = load.Return() 3858 } 3859 3860 builder.DefineVariableInCurrentBB(variable, ret) 3861 return ret 3862 } 3863 3864 func (c *Compiler) getMemoryLenValue(forceReload bool) ssa.Value { 3865 variable := c.memoryLenVariable 3866 builder := c.ssaBuilder 3867 if !forceReload && !c.memoryShared { 3868 if v := builder.FindValueInLinearPath(variable); v.Valid() { 3869 return v 3870 } 3871 } 3872 3873 var ret ssa.Value 3874 if c.offset.LocalMemoryBegin < 0 { 3875 loadMemInstPtr := builder.AllocateInstruction() 3876 loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64) 3877 builder.InsertInstruction(loadMemInstPtr) 3878 memInstPtr := loadMemInstPtr.Return() 3879 3880 loadBufSizePtr := builder.AllocateInstruction() 3881 if c.memoryShared { 3882 sizeOffset := builder.AllocateInstruction().AsIconst64(memoryInstanceBufSizeOffset).Insert(builder).Return() 3883 addr := builder.AllocateInstruction().AsIadd(memInstPtr, sizeOffset).Insert(builder).Return() 3884 loadBufSizePtr.AsAtomicLoad(addr, 8, ssa.TypeI64) 3885 } else { 3886 loadBufSizePtr.AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI64) 3887 } 3888 builder.InsertInstruction(loadBufSizePtr) 3889 3890 ret = loadBufSizePtr.Return() 3891 } else { 3892 load := builder.AllocateInstruction() 3893 if c.memoryShared { 3894 lenOffset := builder.AllocateInstruction().AsIconst64(c.offset.LocalMemoryLen().U64()).Insert(builder).Return() 3895 addr := builder.AllocateInstruction().AsIadd(c.moduleCtxPtrValue, lenOffset).Insert(builder).Return() 3896 load.AsAtomicLoad(addr, 8, ssa.TypeI64) 3897 } else { 3898 load.AsExtLoad(ssa.OpcodeUload32, c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), true) 3899 } 3900 builder.InsertInstruction(load) 3901 ret = load.Return() 3902 } 3903 3904 builder.DefineVariableInCurrentBB(variable, ret) 3905 return ret 3906 } 3907 3908 func (c *Compiler) insertIcmp(cond ssa.IntegerCmpCond) { 3909 state, builder := c.state(), c.ssaBuilder 3910 y, x := state.pop(), state.pop() 3911 cmp := builder.AllocateInstruction() 3912 cmp.AsIcmp(x, y, cond) 3913 builder.InsertInstruction(cmp) 3914 value := cmp.Return() 3915 state.push(value) 3916 } 3917 3918 func (c *Compiler) insertFcmp(cond ssa.FloatCmpCond) { 3919 state, builder := c.state(), c.ssaBuilder 3920 y, x := state.pop(), state.pop() 3921 cmp := builder.AllocateInstruction() 3922 cmp.AsFcmp(x, y, cond) 3923 builder.InsertInstruction(cmp) 3924 value := cmp.Return() 3925 state.push(value) 3926 } 3927 3928 // storeCallerModuleContext stores the current module's moduleContextPtr into execContext.callerModuleContextPtr. 3929 func (c *Compiler) storeCallerModuleContext() { 3930 builder := c.ssaBuilder 3931 execCtx := c.execCtxPtrValue 3932 store := builder.AllocateInstruction() 3933 store.AsStore(ssa.OpcodeStore, 3934 c.moduleCtxPtrValue, execCtx, wazevoapi.ExecutionContextOffsetCallerModuleContextPtr.U32()) 3935 builder.InsertInstruction(store) 3936 } 3937 3938 func (c *Compiler) readByte() byte { 3939 v := c.wasmFunctionBody[c.loweringState.pc+1] 3940 c.loweringState.pc++ 3941 return v 3942 } 3943 3944 func (c *Compiler) readI32u() uint32 { 3945 v, n, err := leb128.LoadUint32(c.wasmFunctionBody[c.loweringState.pc+1:]) 3946 if err != nil { 3947 panic(err) // shouldn't be reached since compilation comes after validation. 3948 } 3949 c.loweringState.pc += int(n) 3950 return v 3951 } 3952 3953 func (c *Compiler) readI32s() int32 { 3954 v, n, err := leb128.LoadInt32(c.wasmFunctionBody[c.loweringState.pc+1:]) 3955 if err != nil { 3956 panic(err) // shouldn't be reached since compilation comes after validation. 3957 } 3958 c.loweringState.pc += int(n) 3959 return v 3960 } 3961 3962 func (c *Compiler) readI64s() int64 { 3963 v, n, err := leb128.LoadInt64(c.wasmFunctionBody[c.loweringState.pc+1:]) 3964 if err != nil { 3965 panic(err) // shouldn't be reached since compilation comes after validation. 3966 } 3967 c.loweringState.pc += int(n) 3968 return v 3969 } 3970 3971 func (c *Compiler) readF32() float32 { 3972 v := math.Float32frombits(binary.LittleEndian.Uint32(c.wasmFunctionBody[c.loweringState.pc+1:])) 3973 c.loweringState.pc += 4 3974 return v 3975 } 3976 3977 func (c *Compiler) readF64() float64 { 3978 v := math.Float64frombits(binary.LittleEndian.Uint64(c.wasmFunctionBody[c.loweringState.pc+1:])) 3979 c.loweringState.pc += 8 3980 return v 3981 } 3982 3983 // readBlockType reads the block type from the current position of the bytecode reader. 3984 func (c *Compiler) readBlockType() *wasm.FunctionType { 3985 state := c.state() 3986 3987 c.br.Reset(c.wasmFunctionBody[state.pc+1:]) 3988 bt, num, err := wasm.DecodeBlockType(c.m.TypeSection, c.br, api.CoreFeaturesV2) 3989 if err != nil { 3990 panic(err) // shouldn't be reached since compilation comes after validation. 3991 } 3992 state.pc += int(num) 3993 3994 return bt 3995 } 3996 3997 func (c *Compiler) readMemArg() (align, offset uint32) { 3998 state := c.state() 3999 4000 align, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:]) 4001 if err != nil { 4002 panic(fmt.Errorf("read memory align: %v", err)) 4003 } 4004 4005 state.pc += int(num) 4006 offset, num, err = leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:]) 4007 if err != nil { 4008 panic(fmt.Errorf("read memory offset: %v", err)) 4009 } 4010 4011 state.pc += int(num) 4012 return align, offset 4013 } 4014 4015 // insertJumpToBlock inserts a jump instruction to the given block in the current block. 4016 func (c *Compiler) insertJumpToBlock(args ssa.Values, targetBlk ssa.BasicBlock) { 4017 if targetBlk.ReturnBlock() { 4018 if c.needListener { 4019 c.callListenerAfter() 4020 } 4021 } 4022 4023 builder := c.ssaBuilder 4024 jmp := builder.AllocateInstruction() 4025 jmp.AsJump(args, targetBlk) 4026 builder.InsertInstruction(jmp) 4027 } 4028 4029 func (c *Compiler) insertIntegerExtend(signed bool, from, to byte) { 4030 state := c.state() 4031 builder := c.ssaBuilder 4032 v := state.pop() 4033 extend := builder.AllocateInstruction() 4034 if signed { 4035 extend.AsSExtend(v, from, to) 4036 } else { 4037 extend.AsUExtend(v, from, to) 4038 } 4039 builder.InsertInstruction(extend) 4040 value := extend.Return() 4041 state.push(value) 4042 } 4043 4044 func (c *Compiler) switchTo(originalStackLen int, targetBlk ssa.BasicBlock) { 4045 if targetBlk.Preds() == 0 { 4046 c.loweringState.unreachable = true 4047 } 4048 4049 // Now we should adjust the stack and start translating the continuation block. 4050 c.loweringState.values = c.loweringState.values[:originalStackLen] 4051 4052 c.ssaBuilder.SetCurrentBlock(targetBlk) 4053 4054 // At this point, blocks params consist only of the Wasm-level parameters, 4055 // (since it's added only when we are trying to resolve variable *inside* this block). 4056 for i := 0; i < targetBlk.Params(); i++ { 4057 value := targetBlk.Param(i) 4058 c.loweringState.push(value) 4059 } 4060 } 4061 4062 // results returns the number of results of the current function. 4063 func (c *Compiler) results() int { 4064 return len(c.wasmFunctionTyp.Results) 4065 } 4066 4067 func (c *Compiler) lowerBrTable(labels []uint32, index ssa.Value) { 4068 state := c.state() 4069 builder := c.ssaBuilder 4070 4071 f := state.ctrlPeekAt(int(labels[0])) 4072 var numArgs int 4073 if f.isLoop() { 4074 numArgs = len(f.blockType.Params) 4075 } else { 4076 numArgs = len(f.blockType.Results) 4077 } 4078 4079 targets := make([]ssa.BasicBlock, len(labels)) 4080 4081 // We need trampoline blocks since depending on the target block structure, we might end up inserting moves before jumps, 4082 // which cannot be done with br_table. Instead, we can do such per-block moves in the trampoline blocks. 4083 // At the linking phase (very end of the backend), we can remove the unnecessary jumps, and therefore no runtime overhead. 4084 currentBlk := builder.CurrentBlock() 4085 for i, l := range labels { 4086 // Args are always on the top of the stack. Note that we should not share the args slice 4087 // among the jump instructions since the args are modified during passes (e.g. redundant phi elimination). 4088 args := c.nPeekDup(numArgs) 4089 targetBlk, _ := state.brTargetArgNumFor(l) 4090 trampoline := builder.AllocateBasicBlock() 4091 builder.SetCurrentBlock(trampoline) 4092 c.insertJumpToBlock(args, targetBlk) 4093 targets[i] = trampoline 4094 } 4095 builder.SetCurrentBlock(currentBlk) 4096 4097 // If the target block has no arguments, we can just jump to the target block. 4098 brTable := builder.AllocateInstruction() 4099 brTable.AsBrTable(index, targets) 4100 builder.InsertInstruction(brTable) 4101 4102 for _, trampoline := range targets { 4103 builder.Seal(trampoline) 4104 } 4105 } 4106 4107 func (l *loweringState) brTargetArgNumFor(labelIndex uint32) (targetBlk ssa.BasicBlock, argNum int) { 4108 targetFrame := l.ctrlPeekAt(int(labelIndex)) 4109 if targetFrame.isLoop() { 4110 targetBlk, argNum = targetFrame.blk, len(targetFrame.blockType.Params) 4111 } else { 4112 targetBlk, argNum = targetFrame.followingBlock, len(targetFrame.blockType.Results) 4113 } 4114 return 4115 } 4116 4117 func (c *Compiler) callListenerBefore() { 4118 c.storeCallerModuleContext() 4119 4120 builder := c.ssaBuilder 4121 beforeListeners1stElement := builder.AllocateInstruction(). 4122 AsLoad(c.moduleCtxPtrValue, 4123 c.offset.BeforeListenerTrampolines1stElement.U32(), 4124 ssa.TypeI64, 4125 ).Insert(builder).Return() 4126 4127 beforeListenerPtr := builder.AllocateInstruction(). 4128 AsLoad(beforeListeners1stElement, uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).Insert(builder).Return() 4129 4130 entry := builder.EntryBlock() 4131 ps := entry.Params() 4132 4133 args := c.allocateVarLengthValues(ps, c.execCtxPtrValue, 4134 builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return()) 4135 for i := 2; i < ps; i++ { 4136 args = args.Append(builder.VarLengthPool(), entry.Param(i)) 4137 } 4138 4139 beforeSig := c.listenerSignatures[c.wasmFunctionTyp][0] 4140 builder.AllocateInstruction(). 4141 AsCallIndirect(beforeListenerPtr, beforeSig, args). 4142 Insert(builder) 4143 } 4144 4145 func (c *Compiler) callListenerAfter() { 4146 c.storeCallerModuleContext() 4147 4148 builder := c.ssaBuilder 4149 afterListeners1stElement := builder.AllocateInstruction(). 4150 AsLoad(c.moduleCtxPtrValue, 4151 c.offset.AfterListenerTrampolines1stElement.U32(), 4152 ssa.TypeI64, 4153 ).Insert(builder).Return() 4154 4155 afterListenerPtr := builder.AllocateInstruction(). 4156 AsLoad(afterListeners1stElement, 4157 uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64). 4158 Insert(builder). 4159 Return() 4160 4161 afterSig := c.listenerSignatures[c.wasmFunctionTyp][1] 4162 args := c.allocateVarLengthValues( 4163 c.results()+2, 4164 c.execCtxPtrValue, 4165 builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return(), 4166 ) 4167 4168 l := c.state() 4169 tail := len(l.values) 4170 args = args.Append(c.ssaBuilder.VarLengthPool(), l.values[tail-c.results():tail]...) 4171 builder.AllocateInstruction(). 4172 AsCallIndirect(afterListenerPtr, afterSig, args). 4173 Insert(builder) 4174 } 4175 4176 const ( 4177 elementOrDataInstanceLenOffset = 8 4178 elementOrDataInstanceSize = 24 4179 ) 4180 4181 // dropInstance inserts instructions to drop the element/data instance specified by the given index. 4182 func (c *Compiler) dropDataOrElementInstance(index uint32, firstItemOffset wazevoapi.Offset) { 4183 builder := c.ssaBuilder 4184 instPtr := c.dataOrElementInstanceAddr(index, firstItemOffset) 4185 4186 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 4187 4188 // Clear the instance. 4189 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, 0).Insert(builder) 4190 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset).Insert(builder) 4191 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset+8).Insert(builder) 4192 } 4193 4194 func (c *Compiler) dataOrElementInstanceAddr(index uint32, firstItemOffset wazevoapi.Offset) ssa.Value { 4195 builder := c.ssaBuilder 4196 4197 _1stItemPtr := builder. 4198 AllocateInstruction(). 4199 AsLoad(c.moduleCtxPtrValue, firstItemOffset.U32(), ssa.TypeI64). 4200 Insert(builder).Return() 4201 4202 // Each data/element instance is a slice, so we need to multiply index by 16 to get the offset of the target instance. 4203 index = index * elementOrDataInstanceSize 4204 indexExt := builder.AllocateInstruction().AsIconst64(uint64(index)).Insert(builder).Return() 4205 // Then, add the offset to the address of the instance. 4206 instPtr := builder.AllocateInstruction().AsIadd(_1stItemPtr, indexExt).Insert(builder).Return() 4207 return instPtr 4208 } 4209 4210 func (c *Compiler) boundsCheckInDataOrElementInstance(instPtr, offsetInInstance, copySize ssa.Value, exitCode wazevoapi.ExitCode) { 4211 builder := c.ssaBuilder 4212 dataInstLen := builder.AllocateInstruction(). 4213 AsLoad(instPtr, elementOrDataInstanceLenOffset, ssa.TypeI64). 4214 Insert(builder).Return() 4215 ceil := builder.AllocateInstruction().AsIadd(offsetInInstance, copySize).Insert(builder).Return() 4216 cmp := builder.AllocateInstruction(). 4217 AsIcmp(dataInstLen, ceil, ssa.IntegerCmpCondUnsignedLessThan). 4218 Insert(builder). 4219 Return() 4220 builder.AllocateInstruction(). 4221 AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, exitCode). 4222 Insert(builder) 4223 } 4224 4225 func (c *Compiler) boundsCheckInTable(tableIndex uint32, offset, size ssa.Value) (tableInstancePtr ssa.Value) { 4226 builder := c.ssaBuilder 4227 dstCeil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return() 4228 4229 // Load the table. 4230 tableInstancePtr = builder.AllocateInstruction(). 4231 AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64). 4232 Insert(builder).Return() 4233 4234 // Load the table's length. 4235 tableLen := builder.AllocateInstruction(). 4236 AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32).Insert(builder).Return() 4237 tableLenExt := builder.AllocateInstruction().AsUExtend(tableLen, 32, 64).Insert(builder).Return() 4238 4239 // Compare the length and the target, and trap if out of bounds. 4240 checkOOB := builder.AllocateInstruction() 4241 checkOOB.AsIcmp(tableLenExt, dstCeil, ssa.IntegerCmpCondUnsignedLessThan) 4242 builder.InsertInstruction(checkOOB) 4243 exitIfOOB := builder.AllocateInstruction() 4244 exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds) 4245 builder.InsertInstruction(exitIfOOB) 4246 return 4247 } 4248 4249 func (c *Compiler) loadTableBaseAddr(tableInstancePtr ssa.Value) ssa.Value { 4250 builder := c.ssaBuilder 4251 loadTableBaseAddress := builder. 4252 AllocateInstruction(). 4253 AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64). 4254 Insert(builder) 4255 return loadTableBaseAddress.Return() 4256 } 4257 4258 func (c *Compiler) boundsCheckInMemory(memLen, offset, size ssa.Value) { 4259 builder := c.ssaBuilder 4260 ceil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return() 4261 cmp := builder.AllocateInstruction(). 4262 AsIcmp(memLen, ceil, ssa.IntegerCmpCondUnsignedLessThan). 4263 Insert(builder). 4264 Return() 4265 builder.AllocateInstruction(). 4266 AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeMemoryOutOfBounds). 4267 Insert(builder) 4268 }