package frontend

import (
	"encoding/binary"
	"fmt"
	"math"
	"strings"

	"github.com/bananabytelabs/wazero/api"
	"github.com/bananabytelabs/wazero/internal/engine/wazevo/ssa"
	"github.com/bananabytelabs/wazero/internal/engine/wazevo/wazevoapi"
	"github.com/bananabytelabs/wazero/internal/leb128"
	"github.com/bananabytelabs/wazero/internal/wasm"
)

type (
	// loweringState is used to keep the state of lowering.
	loweringState struct {
		// values holds the values on the Wasm stack.
		values []ssa.Value
		// controlFrames is the stack of open control frames (function body,
		// block, loop, if/else); the innermost frame is at the tail.
		controlFrames []controlFrame
		// unreachable is true while lowering statically-unreachable code,
		// in which case most opcode handlers become no-ops.
		unreachable bool
		// unreachableDepth counts control frames entered while unreachable,
		// so we know when "end" brings us back to reachable code.
		unreachableDepth int
		// tmpForBrTable is a scratch slice reused across lowerings
		// (named for br_table; its use is outside this chunk — TODO confirm).
		tmpForBrTable []uint32
		// pc is the byte offset of the current opcode in the Wasm function body.
		pc int
	}
	// controlFrame describes one open structured-control construct.
	controlFrame struct {
		// kind distinguishes function/loop/if/block frames.
		kind controlFrameKind
		// originalStackLen holds the number of values on the Wasm stack
		// when start executing this control frame minus params for the block.
		originalStackLenWithoutParam int
		// blk is the loop header if this is loop, and is the else-block if this is an if frame.
		blk,
		// followingBlock is the basic block we enter if we reach "end" of block.
		followingBlock ssa.BasicBlock
		// blockType is the function type describing the block's params/results.
		blockType *wasm.FunctionType
		// clonedArgs hold the arguments to Else block.
		clonedArgs []ssa.Value
	}

	controlFrameKind byte
)

// String implements fmt.Stringer for debugging.
45 func (l *loweringState) String() string { 46 var str []string 47 for _, v := range l.values { 48 str = append(str, fmt.Sprintf("v%v", v.ID())) 49 } 50 var frames []string 51 for i := range l.controlFrames { 52 frames = append(frames, l.controlFrames[i].kind.String()) 53 } 54 return fmt.Sprintf("\n\tunreachable=%v(depth=%d)\n\tstack: %s\n\tcontrol frames: %s", 55 l.unreachable, l.unreachableDepth, 56 strings.Join(str, ", "), 57 strings.Join(frames, ", "), 58 ) 59 } 60 61 const ( 62 controlFrameKindFunction = iota + 1 63 controlFrameKindLoop 64 controlFrameKindIfWithElse 65 controlFrameKindIfWithoutElse 66 controlFrameKindBlock 67 ) 68 69 // String implements fmt.Stringer for debugging. 70 func (k controlFrameKind) String() string { 71 switch k { 72 case controlFrameKindFunction: 73 return "function" 74 case controlFrameKindLoop: 75 return "loop" 76 case controlFrameKindIfWithElse: 77 return "if_with_else" 78 case controlFrameKindIfWithoutElse: 79 return "if_without_else" 80 case controlFrameKindBlock: 81 return "block" 82 default: 83 panic(k) 84 } 85 } 86 87 // isLoop returns true if this is a loop frame. 88 func (ctrl *controlFrame) isLoop() bool { 89 return ctrl.kind == controlFrameKindLoop 90 } 91 92 // reset resets the state of loweringState for reuse. 
93 func (l *loweringState) reset() { 94 l.values = l.values[:0] 95 l.controlFrames = l.controlFrames[:0] 96 l.pc = 0 97 l.unreachable = false 98 l.unreachableDepth = 0 99 } 100 101 func (l *loweringState) peek() (ret ssa.Value) { 102 tail := len(l.values) - 1 103 return l.values[tail] 104 } 105 106 func (l *loweringState) pop() (ret ssa.Value) { 107 tail := len(l.values) - 1 108 ret = l.values[tail] 109 l.values = l.values[:tail] 110 return 111 } 112 113 func (l *loweringState) push(ret ssa.Value) { 114 l.values = append(l.values, ret) 115 } 116 117 func (l *loweringState) nPopInto(n int, dst []ssa.Value) { 118 if n == 0 { 119 return 120 } 121 tail := len(l.values) 122 begin := tail - n 123 view := l.values[begin:tail] 124 copy(dst, view) 125 l.values = l.values[:begin] 126 } 127 128 func (l *loweringState) nPeekDup(n int) []ssa.Value { 129 if n == 0 { 130 return nil 131 } 132 tail := len(l.values) 133 view := l.values[tail-n : tail] 134 return cloneValuesList(view) 135 } 136 137 func (l *loweringState) ctrlPop() (ret controlFrame) { 138 tail := len(l.controlFrames) - 1 139 ret = l.controlFrames[tail] 140 l.controlFrames = l.controlFrames[:tail] 141 return 142 } 143 144 func (l *loweringState) ctrlPush(ret controlFrame) { 145 l.controlFrames = append(l.controlFrames, ret) 146 } 147 148 func (l *loweringState) ctrlPeekAt(n int) (ret *controlFrame) { 149 tail := len(l.controlFrames) - 1 150 return &l.controlFrames[tail-n] 151 } 152 153 // lowerBody lowers the body of the Wasm function to the SSA form. 154 func (c *Compiler) lowerBody(entryBlk ssa.BasicBlock) { 155 c.ssaBuilder.Seal(entryBlk) 156 157 if c.needListener { 158 c.callListenerBefore() 159 } 160 161 // Pushes the empty control frame which corresponds to the function return. 
162 c.loweringState.ctrlPush(controlFrame{ 163 kind: controlFrameKindFunction, 164 blockType: c.wasmFunctionTyp, 165 followingBlock: c.ssaBuilder.ReturnBlock(), 166 }) 167 168 for c.loweringState.pc < len(c.wasmFunctionBody) { 169 c.lowerCurrentOpcode() 170 } 171 } 172 173 func (c *Compiler) state() *loweringState { 174 return &c.loweringState 175 } 176 177 func (c *Compiler) lowerCurrentOpcode() { 178 op := c.wasmFunctionBody[c.loweringState.pc] 179 180 if c.needSourceOffsetInfo { 181 c.ssaBuilder.SetCurrentSourceOffset( 182 ssa.SourceOffset(c.loweringState.pc) + ssa.SourceOffset(c.wasmFunctionBodyOffsetInCodeSection), 183 ) 184 } 185 186 builder := c.ssaBuilder 187 state := c.state() 188 switch op { 189 case wasm.OpcodeI32Const: 190 c := c.readI32s() 191 if state.unreachable { 192 break 193 } 194 195 iconst := builder.AllocateInstruction().AsIconst32(uint32(c)).Insert(builder) 196 value := iconst.Return() 197 state.push(value) 198 case wasm.OpcodeI64Const: 199 c := c.readI64s() 200 if state.unreachable { 201 break 202 } 203 iconst := builder.AllocateInstruction().AsIconst64(uint64(c)).Insert(builder) 204 value := iconst.Return() 205 state.push(value) 206 case wasm.OpcodeF32Const: 207 f32 := c.readF32() 208 if state.unreachable { 209 break 210 } 211 f32const := builder.AllocateInstruction(). 212 AsF32const(f32). 213 Insert(builder). 214 Return() 215 state.push(f32const) 216 case wasm.OpcodeF64Const: 217 f64 := c.readF64() 218 if state.unreachable { 219 break 220 } 221 f64const := builder.AllocateInstruction(). 222 AsF64const(f64). 223 Insert(builder). 
224 Return() 225 state.push(f64const) 226 case wasm.OpcodeI32Add, wasm.OpcodeI64Add: 227 if state.unreachable { 228 break 229 } 230 y, x := state.pop(), state.pop() 231 iadd := builder.AllocateInstruction() 232 iadd.AsIadd(x, y) 233 builder.InsertInstruction(iadd) 234 value := iadd.Return() 235 state.push(value) 236 case wasm.OpcodeI32Sub, wasm.OpcodeI64Sub: 237 if state.unreachable { 238 break 239 } 240 y, x := state.pop(), state.pop() 241 isub := builder.AllocateInstruction() 242 isub.AsIsub(x, y) 243 builder.InsertInstruction(isub) 244 value := isub.Return() 245 state.push(value) 246 case wasm.OpcodeF32Add, wasm.OpcodeF64Add: 247 if state.unreachable { 248 break 249 } 250 y, x := state.pop(), state.pop() 251 iadd := builder.AllocateInstruction() 252 iadd.AsFadd(x, y) 253 builder.InsertInstruction(iadd) 254 value := iadd.Return() 255 state.push(value) 256 case wasm.OpcodeI32Mul, wasm.OpcodeI64Mul: 257 if state.unreachable { 258 break 259 } 260 y, x := state.pop(), state.pop() 261 imul := builder.AllocateInstruction() 262 imul.AsImul(x, y) 263 builder.InsertInstruction(imul) 264 value := imul.Return() 265 state.push(value) 266 case wasm.OpcodeF32Sub, wasm.OpcodeF64Sub: 267 if state.unreachable { 268 break 269 } 270 y, x := state.pop(), state.pop() 271 isub := builder.AllocateInstruction() 272 isub.AsFsub(x, y) 273 builder.InsertInstruction(isub) 274 value := isub.Return() 275 state.push(value) 276 case wasm.OpcodeF32Mul, wasm.OpcodeF64Mul: 277 if state.unreachable { 278 break 279 } 280 y, x := state.pop(), state.pop() 281 isub := builder.AllocateInstruction() 282 isub.AsFmul(x, y) 283 builder.InsertInstruction(isub) 284 value := isub.Return() 285 state.push(value) 286 case wasm.OpcodeF32Div, wasm.OpcodeF64Div: 287 if state.unreachable { 288 break 289 } 290 y, x := state.pop(), state.pop() 291 isub := builder.AllocateInstruction() 292 isub.AsFdiv(x, y) 293 builder.InsertInstruction(isub) 294 value := isub.Return() 295 state.push(value) 296 case wasm.OpcodeF32Max, 
wasm.OpcodeF64Max: 297 if state.unreachable { 298 break 299 } 300 y, x := state.pop(), state.pop() 301 isub := builder.AllocateInstruction() 302 isub.AsFmax(x, y) 303 builder.InsertInstruction(isub) 304 value := isub.Return() 305 state.push(value) 306 case wasm.OpcodeF32Min, wasm.OpcodeF64Min: 307 if state.unreachable { 308 break 309 } 310 y, x := state.pop(), state.pop() 311 isub := builder.AllocateInstruction() 312 isub.AsFmin(x, y) 313 builder.InsertInstruction(isub) 314 value := isub.Return() 315 state.push(value) 316 case wasm.OpcodeI64Extend8S: 317 if state.unreachable { 318 break 319 } 320 c.insertIntegerExtend(true, 8, 64) 321 case wasm.OpcodeI64Extend16S: 322 if state.unreachable { 323 break 324 } 325 c.insertIntegerExtend(true, 16, 64) 326 case wasm.OpcodeI64Extend32S, wasm.OpcodeI64ExtendI32S: 327 if state.unreachable { 328 break 329 } 330 c.insertIntegerExtend(true, 32, 64) 331 case wasm.OpcodeI64ExtendI32U: 332 if state.unreachable { 333 break 334 } 335 c.insertIntegerExtend(false, 32, 64) 336 case wasm.OpcodeI32Extend8S: 337 if state.unreachable { 338 break 339 } 340 c.insertIntegerExtend(true, 8, 32) 341 case wasm.OpcodeI32Extend16S: 342 if state.unreachable { 343 break 344 } 345 c.insertIntegerExtend(true, 16, 32) 346 case wasm.OpcodeI32Eqz, wasm.OpcodeI64Eqz: 347 if state.unreachable { 348 break 349 } 350 x := state.pop() 351 zero := builder.AllocateInstruction() 352 if op == wasm.OpcodeI32Eqz { 353 zero.AsIconst32(0) 354 } else { 355 zero.AsIconst64(0) 356 } 357 builder.InsertInstruction(zero) 358 icmp := builder.AllocateInstruction(). 359 AsIcmp(x, zero.Return(), ssa.IntegerCmpCondEqual). 360 Insert(builder). 
361 Return() 362 state.push(icmp) 363 case wasm.OpcodeI32Eq, wasm.OpcodeI64Eq: 364 if state.unreachable { 365 break 366 } 367 c.insertIcmp(ssa.IntegerCmpCondEqual) 368 case wasm.OpcodeI32Ne, wasm.OpcodeI64Ne: 369 if state.unreachable { 370 break 371 } 372 c.insertIcmp(ssa.IntegerCmpCondNotEqual) 373 case wasm.OpcodeI32LtS, wasm.OpcodeI64LtS: 374 if state.unreachable { 375 break 376 } 377 c.insertIcmp(ssa.IntegerCmpCondSignedLessThan) 378 case wasm.OpcodeI32LtU, wasm.OpcodeI64LtU: 379 if state.unreachable { 380 break 381 } 382 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThan) 383 case wasm.OpcodeI32GtS, wasm.OpcodeI64GtS: 384 if state.unreachable { 385 break 386 } 387 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThan) 388 case wasm.OpcodeI32GtU, wasm.OpcodeI64GtU: 389 if state.unreachable { 390 break 391 } 392 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThan) 393 case wasm.OpcodeI32LeS, wasm.OpcodeI64LeS: 394 if state.unreachable { 395 break 396 } 397 c.insertIcmp(ssa.IntegerCmpCondSignedLessThanOrEqual) 398 case wasm.OpcodeI32LeU, wasm.OpcodeI64LeU: 399 if state.unreachable { 400 break 401 } 402 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThanOrEqual) 403 case wasm.OpcodeI32GeS, wasm.OpcodeI64GeS: 404 if state.unreachable { 405 break 406 } 407 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThanOrEqual) 408 case wasm.OpcodeI32GeU, wasm.OpcodeI64GeU: 409 if state.unreachable { 410 break 411 } 412 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThanOrEqual) 413 414 case wasm.OpcodeF32Eq, wasm.OpcodeF64Eq: 415 if state.unreachable { 416 break 417 } 418 c.insertFcmp(ssa.FloatCmpCondEqual) 419 case wasm.OpcodeF32Ne, wasm.OpcodeF64Ne: 420 if state.unreachable { 421 break 422 } 423 c.insertFcmp(ssa.FloatCmpCondNotEqual) 424 case wasm.OpcodeF32Lt, wasm.OpcodeF64Lt: 425 if state.unreachable { 426 break 427 } 428 c.insertFcmp(ssa.FloatCmpCondLessThan) 429 case wasm.OpcodeF32Gt, wasm.OpcodeF64Gt: 430 if state.unreachable { 431 break 432 } 433 
c.insertFcmp(ssa.FloatCmpCondGreaterThan) 434 case wasm.OpcodeF32Le, wasm.OpcodeF64Le: 435 if state.unreachable { 436 break 437 } 438 c.insertFcmp(ssa.FloatCmpCondLessThanOrEqual) 439 case wasm.OpcodeF32Ge, wasm.OpcodeF64Ge: 440 if state.unreachable { 441 break 442 } 443 c.insertFcmp(ssa.FloatCmpCondGreaterThanOrEqual) 444 case wasm.OpcodeF32Neg, wasm.OpcodeF64Neg: 445 if state.unreachable { 446 break 447 } 448 x := state.pop() 449 v := builder.AllocateInstruction().AsFneg(x).Insert(builder).Return() 450 state.push(v) 451 case wasm.OpcodeF32Sqrt, wasm.OpcodeF64Sqrt: 452 if state.unreachable { 453 break 454 } 455 x := state.pop() 456 v := builder.AllocateInstruction().AsSqrt(x).Insert(builder).Return() 457 state.push(v) 458 case wasm.OpcodeF32Abs, wasm.OpcodeF64Abs: 459 if state.unreachable { 460 break 461 } 462 x := state.pop() 463 v := builder.AllocateInstruction().AsFabs(x).Insert(builder).Return() 464 state.push(v) 465 case wasm.OpcodeF32Copysign, wasm.OpcodeF64Copysign: 466 if state.unreachable { 467 break 468 } 469 y, x := state.pop(), state.pop() 470 v := builder.AllocateInstruction().AsFcopysign(x, y).Insert(builder).Return() 471 state.push(v) 472 473 case wasm.OpcodeF32Ceil, wasm.OpcodeF64Ceil: 474 if state.unreachable { 475 break 476 } 477 x := state.pop() 478 v := builder.AllocateInstruction().AsCeil(x).Insert(builder).Return() 479 state.push(v) 480 case wasm.OpcodeF32Floor, wasm.OpcodeF64Floor: 481 if state.unreachable { 482 break 483 } 484 x := state.pop() 485 v := builder.AllocateInstruction().AsFloor(x).Insert(builder).Return() 486 state.push(v) 487 case wasm.OpcodeF32Trunc, wasm.OpcodeF64Trunc: 488 if state.unreachable { 489 break 490 } 491 x := state.pop() 492 v := builder.AllocateInstruction().AsTrunc(x).Insert(builder).Return() 493 state.push(v) 494 case wasm.OpcodeF32Nearest, wasm.OpcodeF64Nearest: 495 if state.unreachable { 496 break 497 } 498 x := state.pop() 499 v := builder.AllocateInstruction().AsNearest(x).Insert(builder).Return() 500 
state.push(v) 501 case wasm.OpcodeI64TruncF64S, wasm.OpcodeI64TruncF32S, 502 wasm.OpcodeI32TruncF64S, wasm.OpcodeI32TruncF32S, 503 wasm.OpcodeI64TruncF64U, wasm.OpcodeI64TruncF32U, 504 wasm.OpcodeI32TruncF64U, wasm.OpcodeI32TruncF32U: 505 if state.unreachable { 506 break 507 } 508 ret := builder.AllocateInstruction().AsFcvtToInt( 509 state.pop(), 510 c.execCtxPtrValue, 511 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI32TruncF32S || op == wasm.OpcodeI32TruncF64S, 512 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI64TruncF64U || op == wasm.OpcodeI64TruncF32U, 513 false, 514 ).Insert(builder).Return() 515 state.push(ret) 516 case wasm.OpcodeMiscPrefix: 517 state.pc++ 518 // A misc opcode is encoded as an unsigned variable 32-bit integer. 519 miscOpUint, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc:]) 520 if err != nil { 521 // In normal conditions this should never happen because the function has passed validation. 
522 panic(fmt.Sprintf("failed to read misc opcode: %v", err)) 523 } 524 state.pc += int(num - 1) 525 miscOp := wasm.OpcodeMisc(miscOpUint) 526 switch miscOp { 527 case wasm.OpcodeMiscI64TruncSatF64S, wasm.OpcodeMiscI64TruncSatF32S, 528 wasm.OpcodeMiscI32TruncSatF64S, wasm.OpcodeMiscI32TruncSatF32S, 529 wasm.OpcodeMiscI64TruncSatF64U, wasm.OpcodeMiscI64TruncSatF32U, 530 wasm.OpcodeMiscI32TruncSatF64U, wasm.OpcodeMiscI32TruncSatF32U: 531 if state.unreachable { 532 break 533 } 534 ret := builder.AllocateInstruction().AsFcvtToInt( 535 state.pop(), 536 c.execCtxPtrValue, 537 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF64S, 538 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI64TruncSatF64U || miscOp == wasm.OpcodeMiscI64TruncSatF32U, 539 true, 540 ).Insert(builder).Return() 541 state.push(ret) 542 543 case wasm.OpcodeMiscTableSize: 544 tableIndex := c.readI32u() 545 if state.unreachable { 546 break 547 } 548 549 // Load the table. 550 loadTableInstancePtr := builder.AllocateInstruction() 551 loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64) 552 builder.InsertInstruction(loadTableInstancePtr) 553 tableInstancePtr := loadTableInstancePtr.Return() 554 555 // Load the table's length. 556 loadTableLen := builder.AllocateInstruction(). 557 AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32). 558 Insert(builder) 559 state.push(loadTableLen.Return()) 560 561 case wasm.OpcodeMiscTableGrow: 562 tableIndex := c.readI32u() 563 if state.unreachable { 564 break 565 } 566 567 c.storeCallerModuleContext() 568 569 tableIndexVal := builder.AllocateInstruction().AsIconst32(tableIndex).Insert(builder).Return() 570 571 num := state.pop() 572 r := state.pop() 573 574 tableGrowPtr := builder.AllocateInstruction(). 
575 AsLoad(c.execCtxPtrValue, 576 wazevoapi.ExecutionContextOffsetTableGrowTrampolineAddress.U32(), 577 ssa.TypeI64, 578 ).Insert(builder).Return() 579 580 // TODO: reuse the slice. 581 args := []ssa.Value{c.execCtxPtrValue, tableIndexVal, num, r} 582 callGrowRet := builder. 583 AllocateInstruction(). 584 AsCallIndirect(tableGrowPtr, &c.tableGrowSig, args). 585 Insert(builder).Return() 586 state.push(callGrowRet) 587 588 case wasm.OpcodeMiscTableCopy: 589 dstTableIndex := c.readI32u() 590 srcTableIndex := c.readI32u() 591 if state.unreachable { 592 break 593 } 594 595 copySize := builder. 596 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 597 srcOffset := builder. 598 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 599 dstOffset := builder. 600 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 601 602 // Out of bounds check. 603 dstTableInstancePtr := c.boundsCheckInTable(dstTableIndex, dstOffset, copySize) 604 srcTableInstancePtr := c.boundsCheckInTable(srcTableIndex, srcOffset, copySize) 605 606 dstTableBaseAddr := c.loadTableBaseAddr(dstTableInstancePtr) 607 srcTableBaseAddr := c.loadTableBaseAddr(srcTableInstancePtr) 608 609 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 610 611 dstOffsetInBytes := builder.AllocateInstruction().AsIshl(dstOffset, three).Insert(builder).Return() 612 dstAddr := builder.AllocateInstruction().AsIadd(dstTableBaseAddr, dstOffsetInBytes).Insert(builder).Return() 613 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(srcOffset, three).Insert(builder).Return() 614 srcAddr := builder.AllocateInstruction().AsIadd(srcTableBaseAddr, srcOffsetInBytes).Insert(builder).Return() 615 616 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 617 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 618 619 case wasm.OpcodeMiscMemoryCopy: 620 state.pc += 2 // +2 to skip two memory 
indexes which are fixed to zero. 621 if state.unreachable { 622 break 623 } 624 625 copySize := builder. 626 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 627 srcOffset := builder. 628 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 629 dstOffset := builder. 630 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 631 632 // Out of bounds check. 633 memLen := c.getMemoryLenValue(false) 634 c.boundsCheckInMemory(memLen, dstOffset, copySize) 635 c.boundsCheckInMemory(memLen, srcOffset, copySize) 636 637 memBase := c.getMemoryBaseValue(false) 638 dstAddr := builder.AllocateInstruction().AsIadd(memBase, dstOffset).Insert(builder).Return() 639 srcAddr := builder.AllocateInstruction().AsIadd(memBase, srcOffset).Insert(builder).Return() 640 641 c.callMemmove(dstAddr, srcAddr, copySize) 642 643 case wasm.OpcodeMiscTableFill: 644 tableIndex := c.readI32u() 645 if state.unreachable { 646 break 647 } 648 fillSize := state.pop() 649 value := state.pop() 650 offset := state.pop() 651 652 fillSizeExt := builder. 653 AllocateInstruction().AsUExtend(fillSize, 32, 64).Insert(builder).Return() 654 offsetExt := builder. 655 AllocateInstruction().AsUExtend(offset, 32, 64).Insert(builder).Return() 656 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetExt, fillSizeExt) 657 658 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 659 offsetInBytes := builder.AllocateInstruction().AsIshl(offsetExt, three).Insert(builder).Return() 660 fillSizeInBytes := builder.AllocateInstruction().AsIshl(fillSizeExt, three).Insert(builder).Return() 661 662 // Calculate the base address of the table. 663 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 664 addr := builder.AllocateInstruction().AsIadd(tableBaseAddr, offsetInBytes).Insert(builder).Return() 665 666 // Prepare the loop and following block. 
667 beforeLoop := builder.AllocateBasicBlock() 668 loopBlk := builder.AllocateBasicBlock() 669 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 670 followingBlk := builder.AllocateBasicBlock() 671 672 // Uses the copy trick for faster filling buffer like memory.fill, but in this case we copy 8 bytes at a time. 673 // buf := memoryInst.Buffer[offset : offset+fillSize] 674 // buf[0:8] = value 675 // for i := 8; i < fillSize; i *= 2 { Begin with 8 bytes. 676 // copy(buf[i:], buf[:i]) 677 // } 678 679 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 680 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 681 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSizeExt, zero, ssa.IntegerCmpCondEqual). 682 Insert(builder).Return() 683 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, nil, followingBlk).Insert(builder) 684 c.insertJumpToBlock(nil, beforeLoop) 685 686 // buf[0:8] = value 687 builder.SetCurrentBlock(beforeLoop) 688 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, value, addr, 0).Insert(builder) 689 initValue := builder.AllocateInstruction().AsIconst64(8).Insert(builder).Return() 690 c.insertJumpToBlock([]ssa.Value{initValue}, loopBlk) // TODO: reuse the slice. 691 692 builder.SetCurrentBlock(loopBlk) 693 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 694 695 // If loopVar*2 > fillSizeInBytes, then count must be fillSizeInBytes-loopVar. 696 var count ssa.Value 697 { 698 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 699 loopVarDoubledLargerThanFillSize := builder. 700 AllocateInstruction().AsIcmp(loopVarDoubled, fillSizeInBytes, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 
701 Insert(builder).Return() 702 diff := builder.AllocateInstruction().AsIsub(fillSizeInBytes, loopVar).Insert(builder).Return() 703 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 704 } 705 706 c.callMemmove(dstAddr, addr, count) 707 708 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 709 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 710 loopVarLessThanFillSize := builder.AllocateInstruction(). 711 AsIcmp(newLoopVar, fillSizeInBytes, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 712 713 builder.AllocateInstruction(). 714 AsBrnz(loopVarLessThanFillSize, []ssa.Value{newLoopVar}, loopBlk). // TODO: reuse the slice. 715 Insert(builder) 716 717 c.insertJumpToBlock(nil, followingBlk) 718 builder.SetCurrentBlock(followingBlk) 719 720 builder.Seal(beforeLoop) 721 builder.Seal(loopBlk) 722 builder.Seal(followingBlk) 723 724 case wasm.OpcodeMiscMemoryFill: 725 state.pc++ // Skip the memory index which is fixed to zero. 726 if state.unreachable { 727 break 728 } 729 730 fillSize := builder. 731 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 732 value := state.pop() 733 offset := builder. 734 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 735 736 // Out of bounds check. 737 c.boundsCheckInMemory(c.getMemoryLenValue(false), offset, fillSize) 738 739 // Calculate the base address: 740 addr := builder.AllocateInstruction().AsIadd(c.getMemoryBaseValue(false), offset).Insert(builder).Return() 741 742 // Uses the copy trick for faster filling buffer: https://gist.github.com/taylorza/df2f89d5f9ab3ffd06865062a4cf015d 743 // buf := memoryInst.Buffer[offset : offset+fillSize] 744 // buf[0] = value 745 // for i := 1; i < fillSize; i *= 2 { 746 // copy(buf[i:], buf[:i]) 747 // } 748 749 // Prepare the loop and following block. 
750 beforeLoop := builder.AllocateBasicBlock() 751 loopBlk := builder.AllocateBasicBlock() 752 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 753 followingBlk := builder.AllocateBasicBlock() 754 755 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 756 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 757 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSize, zero, ssa.IntegerCmpCondEqual). 758 Insert(builder).Return() 759 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, nil, followingBlk).Insert(builder) 760 c.insertJumpToBlock(nil, beforeLoop) 761 762 // buf[0] = value 763 builder.SetCurrentBlock(beforeLoop) 764 builder.AllocateInstruction().AsStore(ssa.OpcodeIstore8, value, addr, 0).Insert(builder) 765 initValue := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 766 c.insertJumpToBlock([]ssa.Value{initValue}, loopBlk) // TODO: reuse the slice. 767 768 builder.SetCurrentBlock(loopBlk) 769 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 770 771 // If loopVar*2 > fillSizeExt, then count must be fillSizeExt-loopVar. 772 var count ssa.Value 773 { 774 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 775 loopVarDoubledLargerThanFillSize := builder. 776 AllocateInstruction().AsIcmp(loopVarDoubled, fillSize, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 
777 Insert(builder).Return() 778 diff := builder.AllocateInstruction().AsIsub(fillSize, loopVar).Insert(builder).Return() 779 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 780 } 781 782 c.callMemmove(dstAddr, addr, count) 783 784 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 785 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 786 loopVarLessThanFillSize := builder.AllocateInstruction(). 787 AsIcmp(newLoopVar, fillSize, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 788 789 builder.AllocateInstruction(). 790 AsBrnz(loopVarLessThanFillSize, []ssa.Value{newLoopVar}, loopBlk). // TODO: reuse the slice. 791 Insert(builder) 792 793 c.insertJumpToBlock(nil, followingBlk) 794 builder.SetCurrentBlock(followingBlk) 795 796 builder.Seal(beforeLoop) 797 builder.Seal(loopBlk) 798 builder.Seal(followingBlk) 799 800 case wasm.OpcodeMiscMemoryInit: 801 index := c.readI32u() 802 state.pc++ // Skip the memory index which is fixed to zero. 803 if state.unreachable { 804 break 805 } 806 807 copySize := builder. 808 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 809 offsetInDataInstance := builder. 810 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 811 offsetInMemory := builder. 812 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 813 814 dataInstPtr := c.dataOrElementInstanceAddr(index, c.offset.DataInstances1stElement) 815 816 // Bounds check. 
817 c.boundsCheckInMemory(c.getMemoryLenValue(false), offsetInMemory, copySize) 818 c.boundsCheckInDataOrElementInstance(dataInstPtr, offsetInDataInstance, copySize, wazevoapi.ExitCodeMemoryOutOfBounds) 819 820 dataInstBaseAddr := builder.AllocateInstruction().AsLoad(dataInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 821 srcAddr := builder.AllocateInstruction().AsIadd(dataInstBaseAddr, offsetInDataInstance).Insert(builder).Return() 822 823 memBase := c.getMemoryBaseValue(false) 824 dstAddr := builder.AllocateInstruction().AsIadd(memBase, offsetInMemory).Insert(builder).Return() 825 826 c.callMemmove(dstAddr, srcAddr, copySize) 827 828 case wasm.OpcodeMiscTableInit: 829 elemIndex := c.readI32u() 830 tableIndex := c.readI32u() 831 if state.unreachable { 832 break 833 } 834 835 copySize := builder. 836 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 837 offsetInElementInstance := builder. 838 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 839 offsetInTable := builder. 840 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 841 842 elemInstPtr := c.dataOrElementInstanceAddr(elemIndex, c.offset.ElementInstances1stElement) 843 844 // Bounds check. 845 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetInTable, copySize) 846 c.boundsCheckInDataOrElementInstance(elemInstPtr, offsetInElementInstance, copySize, wazevoapi.ExitCodeTableOutOfBounds) 847 848 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 849 // Calculates the destination address in the table. 850 tableOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInTable, three).Insert(builder).Return() 851 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 852 dstAddr := builder.AllocateInstruction().AsIadd(tableBaseAddr, tableOffsetInBytes).Insert(builder).Return() 853 854 // Calculates the source address in the element instance. 
855 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInElementInstance, three).Insert(builder).Return() 856 elemInstBaseAddr := builder.AllocateInstruction().AsLoad(elemInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 857 srcAddr := builder.AllocateInstruction().AsIadd(elemInstBaseAddr, srcOffsetInBytes).Insert(builder).Return() 858 859 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 860 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 861 862 case wasm.OpcodeMiscElemDrop: 863 index := c.readI32u() 864 if state.unreachable { 865 break 866 } 867 868 c.dropDataOrElementInstance(index, c.offset.ElementInstances1stElement) 869 870 case wasm.OpcodeMiscDataDrop: 871 index := c.readI32u() 872 if state.unreachable { 873 break 874 } 875 c.dropDataOrElementInstance(index, c.offset.DataInstances1stElement) 876 877 default: 878 panic("Unknown MiscOp " + wasm.MiscInstructionName(miscOp)) 879 } 880 881 case wasm.OpcodeI32ReinterpretF32: 882 if state.unreachable { 883 break 884 } 885 reinterpret := builder.AllocateInstruction(). 886 AsBitcast(state.pop(), ssa.TypeI32). 887 Insert(builder).Return() 888 state.push(reinterpret) 889 890 case wasm.OpcodeI64ReinterpretF64: 891 if state.unreachable { 892 break 893 } 894 reinterpret := builder.AllocateInstruction(). 895 AsBitcast(state.pop(), ssa.TypeI64). 896 Insert(builder).Return() 897 state.push(reinterpret) 898 899 case wasm.OpcodeF32ReinterpretI32: 900 if state.unreachable { 901 break 902 } 903 reinterpret := builder.AllocateInstruction(). 904 AsBitcast(state.pop(), ssa.TypeF32). 905 Insert(builder).Return() 906 state.push(reinterpret) 907 908 case wasm.OpcodeF64ReinterpretI64: 909 if state.unreachable { 910 break 911 } 912 reinterpret := builder.AllocateInstruction(). 913 AsBitcast(state.pop(), ssa.TypeF64). 
914 Insert(builder).Return() 915 state.push(reinterpret) 916 917 case wasm.OpcodeI32DivS, wasm.OpcodeI64DivS: 918 if state.unreachable { 919 break 920 } 921 y, x := state.pop(), state.pop() 922 result := builder.AllocateInstruction().AsSDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 923 state.push(result) 924 925 case wasm.OpcodeI32DivU, wasm.OpcodeI64DivU: 926 if state.unreachable { 927 break 928 } 929 y, x := state.pop(), state.pop() 930 result := builder.AllocateInstruction().AsUDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 931 state.push(result) 932 933 case wasm.OpcodeI32RemS, wasm.OpcodeI64RemS: 934 if state.unreachable { 935 break 936 } 937 y, x := state.pop(), state.pop() 938 result := builder.AllocateInstruction().AsSRem(x, y, c.execCtxPtrValue).Insert(builder).Return() 939 state.push(result) 940 941 case wasm.OpcodeI32RemU, wasm.OpcodeI64RemU: 942 if state.unreachable { 943 break 944 } 945 y, x := state.pop(), state.pop() 946 result := builder.AllocateInstruction().AsURem(x, y, c.execCtxPtrValue).Insert(builder).Return() 947 state.push(result) 948 949 case wasm.OpcodeI32And, wasm.OpcodeI64And: 950 if state.unreachable { 951 break 952 } 953 y, x := state.pop(), state.pop() 954 and := builder.AllocateInstruction() 955 and.AsBand(x, y) 956 builder.InsertInstruction(and) 957 value := and.Return() 958 state.push(value) 959 case wasm.OpcodeI32Or, wasm.OpcodeI64Or: 960 if state.unreachable { 961 break 962 } 963 y, x := state.pop(), state.pop() 964 or := builder.AllocateInstruction() 965 or.AsBor(x, y) 966 builder.InsertInstruction(or) 967 value := or.Return() 968 state.push(value) 969 case wasm.OpcodeI32Xor, wasm.OpcodeI64Xor: 970 if state.unreachable { 971 break 972 } 973 y, x := state.pop(), state.pop() 974 xor := builder.AllocateInstruction() 975 xor.AsBxor(x, y) 976 builder.InsertInstruction(xor) 977 value := xor.Return() 978 state.push(value) 979 case wasm.OpcodeI32Shl, wasm.OpcodeI64Shl: 980 if state.unreachable { 981 break 982 } 983 y, x := 
state.pop(), state.pop() 984 ishl := builder.AllocateInstruction() 985 ishl.AsIshl(x, y) 986 builder.InsertInstruction(ishl) 987 value := ishl.Return() 988 state.push(value) 989 case wasm.OpcodeI32ShrU, wasm.OpcodeI64ShrU: 990 if state.unreachable { 991 break 992 } 993 y, x := state.pop(), state.pop() 994 ishl := builder.AllocateInstruction() 995 ishl.AsUshr(x, y) 996 builder.InsertInstruction(ishl) 997 value := ishl.Return() 998 state.push(value) 999 case wasm.OpcodeI32ShrS, wasm.OpcodeI64ShrS: 1000 if state.unreachable { 1001 break 1002 } 1003 y, x := state.pop(), state.pop() 1004 ishl := builder.AllocateInstruction() 1005 ishl.AsSshr(x, y) 1006 builder.InsertInstruction(ishl) 1007 value := ishl.Return() 1008 state.push(value) 1009 case wasm.OpcodeI32Rotl, wasm.OpcodeI64Rotl: 1010 if state.unreachable { 1011 break 1012 } 1013 y, x := state.pop(), state.pop() 1014 rotl := builder.AllocateInstruction() 1015 rotl.AsRotl(x, y) 1016 builder.InsertInstruction(rotl) 1017 value := rotl.Return() 1018 state.push(value) 1019 case wasm.OpcodeI32Rotr, wasm.OpcodeI64Rotr: 1020 if state.unreachable { 1021 break 1022 } 1023 y, x := state.pop(), state.pop() 1024 rotr := builder.AllocateInstruction() 1025 rotr.AsRotr(x, y) 1026 builder.InsertInstruction(rotr) 1027 value := rotr.Return() 1028 state.push(value) 1029 case wasm.OpcodeI32Clz, wasm.OpcodeI64Clz: 1030 if state.unreachable { 1031 break 1032 } 1033 x := state.pop() 1034 clz := builder.AllocateInstruction() 1035 clz.AsClz(x) 1036 builder.InsertInstruction(clz) 1037 value := clz.Return() 1038 state.push(value) 1039 case wasm.OpcodeI32Ctz, wasm.OpcodeI64Ctz: 1040 if state.unreachable { 1041 break 1042 } 1043 x := state.pop() 1044 ctz := builder.AllocateInstruction() 1045 ctz.AsCtz(x) 1046 builder.InsertInstruction(ctz) 1047 value := ctz.Return() 1048 state.push(value) 1049 case wasm.OpcodeI32Popcnt, wasm.OpcodeI64Popcnt: 1050 if state.unreachable { 1051 break 1052 } 1053 x := state.pop() 1054 popcnt := 
builder.AllocateInstruction() 1055 popcnt.AsPopcnt(x) 1056 builder.InsertInstruction(popcnt) 1057 value := popcnt.Return() 1058 state.push(value) 1059 1060 case wasm.OpcodeI32WrapI64: 1061 if state.unreachable { 1062 break 1063 } 1064 x := state.pop() 1065 wrap := builder.AllocateInstruction().AsIreduce(x, ssa.TypeI32).Insert(builder).Return() 1066 state.push(wrap) 1067 case wasm.OpcodeGlobalGet: 1068 index := c.readI32u() 1069 if state.unreachable { 1070 break 1071 } 1072 v := c.getWasmGlobalValue(index, false) 1073 state.push(v) 1074 case wasm.OpcodeGlobalSet: 1075 index := c.readI32u() 1076 if state.unreachable { 1077 break 1078 } 1079 v := state.pop() 1080 c.setWasmGlobalValue(index, v) 1081 case wasm.OpcodeLocalGet: 1082 index := c.readI32u() 1083 if state.unreachable { 1084 break 1085 } 1086 variable := c.localVariable(index) 1087 v := builder.MustFindValue(variable) 1088 state.push(v) 1089 case wasm.OpcodeLocalSet: 1090 index := c.readI32u() 1091 if state.unreachable { 1092 break 1093 } 1094 variable := c.localVariable(index) 1095 newValue := state.pop() 1096 builder.DefineVariableInCurrentBB(variable, newValue) 1097 1098 case wasm.OpcodeLocalTee: 1099 index := c.readI32u() 1100 if state.unreachable { 1101 break 1102 } 1103 variable := c.localVariable(index) 1104 newValue := state.peek() 1105 builder.DefineVariableInCurrentBB(variable, newValue) 1106 1107 case wasm.OpcodeSelect, wasm.OpcodeTypedSelect: 1108 if op == wasm.OpcodeTypedSelect { 1109 state.pc += 2 // ignores the type which is only needed during validation. 1110 } 1111 1112 if state.unreachable { 1113 break 1114 } 1115 1116 cond := state.pop() 1117 v2 := state.pop() 1118 v1 := state.pop() 1119 1120 sl := builder.AllocateInstruction(). 1121 AsSelect(cond, v1, v2). 1122 Insert(builder). 1123 Return() 1124 state.push(sl) 1125 1126 case wasm.OpcodeMemorySize: 1127 state.pc++ // skips the memory index. 
1128 if state.unreachable { 1129 break 1130 } 1131 1132 var memSizeInBytes ssa.Value 1133 if c.offset.LocalMemoryBegin < 0 { 1134 memInstPtr := builder.AllocateInstruction(). 1135 AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64). 1136 Insert(builder). 1137 Return() 1138 1139 memSizeInBytes = builder.AllocateInstruction(). 1140 AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI32). 1141 Insert(builder). 1142 Return() 1143 } else { 1144 memSizeInBytes = builder.AllocateInstruction(). 1145 AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), ssa.TypeI32). 1146 Insert(builder). 1147 Return() 1148 } 1149 1150 amount := builder.AllocateInstruction() 1151 amount.AsIconst32(uint32(wasm.MemoryPageSizeInBits)) 1152 builder.InsertInstruction(amount) 1153 memSize := builder.AllocateInstruction(). 1154 AsUshr(memSizeInBytes, amount.Return()). 1155 Insert(builder). 1156 Return() 1157 state.push(memSize) 1158 1159 case wasm.OpcodeMemoryGrow: 1160 state.pc++ // skips the memory index. 1161 if state.unreachable { 1162 break 1163 } 1164 1165 c.storeCallerModuleContext() 1166 1167 pages := state.pop() 1168 memoryGrowPtr := builder.AllocateInstruction(). 1169 AsLoad(c.execCtxPtrValue, 1170 wazevoapi.ExecutionContextOffsetMemoryGrowTrampolineAddress.U32(), 1171 ssa.TypeI64, 1172 ).Insert(builder).Return() 1173 1174 // TODO: reuse the slice. 1175 args := []ssa.Value{c.execCtxPtrValue, pages} 1176 1177 callGrowRet := builder. 1178 AllocateInstruction(). 1179 AsCallIndirect(memoryGrowPtr, &c.memoryGrowSig, args). 1180 Insert(builder).Return() 1181 state.push(callGrowRet) 1182 1183 // After the memory grow, reload the cached memory base and len. 
1184 c.reloadMemoryBaseLen() 1185 1186 case wasm.OpcodeI32Store, 1187 wasm.OpcodeI64Store, 1188 wasm.OpcodeF32Store, 1189 wasm.OpcodeF64Store, 1190 wasm.OpcodeI32Store8, 1191 wasm.OpcodeI32Store16, 1192 wasm.OpcodeI64Store8, 1193 wasm.OpcodeI64Store16, 1194 wasm.OpcodeI64Store32: 1195 1196 _, offset := c.readMemArg() 1197 if state.unreachable { 1198 break 1199 } 1200 var opSize uint64 1201 var opcode ssa.Opcode 1202 switch op { 1203 case wasm.OpcodeI32Store, wasm.OpcodeF32Store: 1204 opcode = ssa.OpcodeStore 1205 opSize = 4 1206 case wasm.OpcodeI64Store, wasm.OpcodeF64Store: 1207 opcode = ssa.OpcodeStore 1208 opSize = 8 1209 case wasm.OpcodeI32Store8, wasm.OpcodeI64Store8: 1210 opcode = ssa.OpcodeIstore8 1211 opSize = 1 1212 case wasm.OpcodeI32Store16, wasm.OpcodeI64Store16: 1213 opcode = ssa.OpcodeIstore16 1214 opSize = 2 1215 case wasm.OpcodeI64Store32: 1216 opcode = ssa.OpcodeIstore32 1217 opSize = 4 1218 default: 1219 panic("BUG") 1220 } 1221 1222 value := state.pop() 1223 baseAddr := state.pop() 1224 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1225 builder.AllocateInstruction(). 1226 AsStore(opcode, value, addr, offset). 
1227 Insert(builder) 1228 1229 case wasm.OpcodeI32Load, 1230 wasm.OpcodeI64Load, 1231 wasm.OpcodeF32Load, 1232 wasm.OpcodeF64Load, 1233 wasm.OpcodeI32Load8S, 1234 wasm.OpcodeI32Load8U, 1235 wasm.OpcodeI32Load16S, 1236 wasm.OpcodeI32Load16U, 1237 wasm.OpcodeI64Load8S, 1238 wasm.OpcodeI64Load8U, 1239 wasm.OpcodeI64Load16S, 1240 wasm.OpcodeI64Load16U, 1241 wasm.OpcodeI64Load32S, 1242 wasm.OpcodeI64Load32U: 1243 _, offset := c.readMemArg() 1244 if state.unreachable { 1245 break 1246 } 1247 1248 var opSize uint64 1249 switch op { 1250 case wasm.OpcodeI32Load, wasm.OpcodeF32Load: 1251 opSize = 4 1252 case wasm.OpcodeI64Load, wasm.OpcodeF64Load: 1253 opSize = 8 1254 case wasm.OpcodeI32Load8S, wasm.OpcodeI32Load8U: 1255 opSize = 1 1256 case wasm.OpcodeI32Load16S, wasm.OpcodeI32Load16U: 1257 opSize = 2 1258 case wasm.OpcodeI64Load8S, wasm.OpcodeI64Load8U: 1259 opSize = 1 1260 case wasm.OpcodeI64Load16S, wasm.OpcodeI64Load16U: 1261 opSize = 2 1262 case wasm.OpcodeI64Load32S, wasm.OpcodeI64Load32U: 1263 opSize = 4 1264 default: 1265 panic("BUG") 1266 } 1267 1268 baseAddr := state.pop() 1269 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1270 load := builder.AllocateInstruction() 1271 switch op { 1272 case wasm.OpcodeI32Load: 1273 load.AsLoad(addr, offset, ssa.TypeI32) 1274 case wasm.OpcodeI64Load: 1275 load.AsLoad(addr, offset, ssa.TypeI64) 1276 case wasm.OpcodeF32Load: 1277 load.AsLoad(addr, offset, ssa.TypeF32) 1278 case wasm.OpcodeF64Load: 1279 load.AsLoad(addr, offset, ssa.TypeF64) 1280 case wasm.OpcodeI32Load8S: 1281 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, false) 1282 case wasm.OpcodeI32Load8U: 1283 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, false) 1284 case wasm.OpcodeI32Load16S: 1285 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, false) 1286 case wasm.OpcodeI32Load16U: 1287 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, false) 1288 case wasm.OpcodeI64Load8S: 1289 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, true) 1290 case wasm.OpcodeI64Load8U: 
1291 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, true) 1292 case wasm.OpcodeI64Load16S: 1293 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, true) 1294 case wasm.OpcodeI64Load16U: 1295 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, true) 1296 case wasm.OpcodeI64Load32S: 1297 load.AsExtLoad(ssa.OpcodeSload32, addr, offset, true) 1298 case wasm.OpcodeI64Load32U: 1299 load.AsExtLoad(ssa.OpcodeUload32, addr, offset, true) 1300 default: 1301 panic("BUG") 1302 } 1303 builder.InsertInstruction(load) 1304 state.push(load.Return()) 1305 case wasm.OpcodeBlock: 1306 // Note: we do not need to create a BB for this as that would always have only one predecessor 1307 // which is the current BB, and therefore it's always ok to merge them in any way. 1308 1309 bt := c.readBlockType() 1310 1311 if state.unreachable { 1312 state.unreachableDepth++ 1313 break 1314 } 1315 1316 followingBlk := builder.AllocateBasicBlock() 1317 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1318 1319 state.ctrlPush(controlFrame{ 1320 kind: controlFrameKindBlock, 1321 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1322 followingBlock: followingBlk, 1323 blockType: bt, 1324 }) 1325 case wasm.OpcodeLoop: 1326 bt := c.readBlockType() 1327 1328 if state.unreachable { 1329 state.unreachableDepth++ 1330 break 1331 } 1332 1333 loopHeader, afterLoopBlock := builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1334 c.addBlockParamsFromWasmTypes(bt.Params, loopHeader) 1335 c.addBlockParamsFromWasmTypes(bt.Results, afterLoopBlock) 1336 1337 originalLen := len(state.values) - len(bt.Params) 1338 state.ctrlPush(controlFrame{ 1339 originalStackLenWithoutParam: originalLen, 1340 kind: controlFrameKindLoop, 1341 blk: loopHeader, 1342 followingBlock: afterLoopBlock, 1343 blockType: bt, 1344 }) 1345 1346 var args []ssa.Value 1347 if len(bt.Params) > 0 { 1348 args = cloneValuesList(state.values[originalLen:]) 1349 } 1350 1351 // Insert the jump to the header of loop. 
1352 br := builder.AllocateInstruction() 1353 br.AsJump(args, loopHeader) 1354 builder.InsertInstruction(br) 1355 1356 c.switchTo(originalLen, loopHeader) 1357 1358 if c.ensureTermination { 1359 checkModuleExitCodePtr := builder.AllocateInstruction(). 1360 AsLoad(c.execCtxPtrValue, 1361 wazevoapi.ExecutionContextOffsetCheckModuleExitCodeTrampolineAddress.U32(), 1362 ssa.TypeI64, 1363 ).Insert(builder).Return() 1364 1365 c.checkModuleExitCodeArg[0] = c.execCtxPtrValue 1366 1367 builder.AllocateInstruction(). 1368 AsCallIndirect(checkModuleExitCodePtr, &c.checkModuleExitCodeSig, c.checkModuleExitCodeArg[:]). 1369 Insert(builder) 1370 } 1371 case wasm.OpcodeIf: 1372 bt := c.readBlockType() 1373 1374 if state.unreachable { 1375 state.unreachableDepth++ 1376 break 1377 } 1378 1379 v := state.pop() 1380 thenBlk, elseBlk, followingBlk := builder.AllocateBasicBlock(), builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1381 1382 // We do not make the Wasm-level block parameters as SSA-level block params for if-else blocks 1383 // since they won't be PHI and the definition is unique. 1384 1385 // On the other hand, the following block after if-else-end will likely have 1386 // multiple definitions (one in Then and another in Else blocks). 1387 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1388 1389 var args []ssa.Value 1390 if len(bt.Params) > 0 { 1391 args = cloneValuesList(state.values[len(state.values)-len(bt.Params):]) 1392 } 1393 1394 // Insert the conditional jump to the Else block. 1395 brz := builder.AllocateInstruction() 1396 brz.AsBrz(v, nil, elseBlk) 1397 builder.InsertInstruction(brz) 1398 1399 // Then, insert the jump to the Then block. 
1400 br := builder.AllocateInstruction() 1401 br.AsJump(nil, thenBlk) 1402 builder.InsertInstruction(br) 1403 1404 state.ctrlPush(controlFrame{ 1405 kind: controlFrameKindIfWithoutElse, 1406 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1407 blk: elseBlk, 1408 followingBlock: followingBlk, 1409 blockType: bt, 1410 clonedArgs: args, 1411 }) 1412 1413 builder.SetCurrentBlock(thenBlk) 1414 1415 // Then and Else (if exists) have only one predecessor. 1416 builder.Seal(thenBlk) 1417 builder.Seal(elseBlk) 1418 case wasm.OpcodeElse: 1419 c.clearSafeBounds() // Reset the safe bounds since we are entering the Else block. 1420 1421 ifctrl := state.ctrlPeekAt(0) 1422 if unreachable := state.unreachable; unreachable && state.unreachableDepth > 0 { 1423 // If it is currently in unreachable and is a nested if, 1424 // we just remove the entire else block. 1425 break 1426 } 1427 1428 ifctrl.kind = controlFrameKindIfWithElse 1429 if !state.unreachable { 1430 // If this Then block is currently reachable, we have to insert the branching to the following BB. 1431 followingBlk := ifctrl.followingBlock // == the BB after if-then-else. 1432 args := c.loweringState.nPeekDup(len(ifctrl.blockType.Results)) 1433 c.insertJumpToBlock(args, followingBlk) 1434 } else { 1435 state.unreachable = false 1436 } 1437 1438 // Reset the stack so that we can correctly handle the else block. 1439 state.values = state.values[:ifctrl.originalStackLenWithoutParam] 1440 elseBlk := ifctrl.blk 1441 for _, arg := range ifctrl.clonedArgs { 1442 state.push(arg) 1443 } 1444 1445 builder.SetCurrentBlock(elseBlk) 1446 1447 case wasm.OpcodeEnd: 1448 c.clearSafeBounds() // Reset the safe bounds since we are exiting the block. 
1449 1450 if state.unreachableDepth > 0 { 1451 state.unreachableDepth-- 1452 break 1453 } 1454 1455 ctrl := state.ctrlPop() 1456 followingBlk := ctrl.followingBlock 1457 1458 unreachable := state.unreachable 1459 if !unreachable { 1460 // Top n-th args will be used as a result of the current control frame. 1461 args := c.loweringState.nPeekDup(len(ctrl.blockType.Results)) 1462 1463 // Insert the unconditional branch to the target. 1464 c.insertJumpToBlock(args, followingBlk) 1465 } else { // recover from the unreachable state. 1466 state.unreachable = false 1467 } 1468 1469 switch ctrl.kind { 1470 case controlFrameKindFunction: 1471 break // This is the very end of function. 1472 case controlFrameKindLoop: 1473 // Loop header block can be reached from any br/br_table contained in the loop, 1474 // so now that we've reached End of it, we can seal it. 1475 builder.Seal(ctrl.blk) 1476 case controlFrameKindIfWithoutElse: 1477 // If this is the end of Then block, we have to emit the empty Else block. 1478 elseBlk := ctrl.blk 1479 builder.SetCurrentBlock(elseBlk) 1480 c.insertJumpToBlock(ctrl.clonedArgs, followingBlk) 1481 } 1482 1483 builder.Seal(ctrl.followingBlock) 1484 1485 // Ready to start translating the following block. 1486 c.switchTo(ctrl.originalStackLenWithoutParam, followingBlk) 1487 1488 case wasm.OpcodeBr: 1489 labelIndex := c.readI32u() 1490 if state.unreachable { 1491 break 1492 } 1493 1494 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1495 args := c.loweringState.nPeekDup(argNum) 1496 c.insertJumpToBlock(args, targetBlk) 1497 1498 state.unreachable = true 1499 1500 case wasm.OpcodeBrIf: 1501 labelIndex := c.readI32u() 1502 if state.unreachable { 1503 break 1504 } 1505 1506 v := state.pop() 1507 1508 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1509 args := c.loweringState.nPeekDup(argNum) 1510 1511 // Insert the conditional jump to the target block. 
1512 brnz := builder.AllocateInstruction() 1513 brnz.AsBrnz(v, args, targetBlk) 1514 builder.InsertInstruction(brnz) 1515 1516 // Insert the unconditional jump to the Else block which corresponds to after br_if. 1517 elseBlk := builder.AllocateBasicBlock() 1518 c.insertJumpToBlock(nil, elseBlk) 1519 1520 // Now start translating the instructions after br_if. 1521 builder.Seal(elseBlk) // Else of br_if has the current block as the only one successor. 1522 builder.SetCurrentBlock(elseBlk) 1523 1524 case wasm.OpcodeBrTable: 1525 labels := state.tmpForBrTable 1526 labels = labels[:0] 1527 labelCount := c.readI32u() 1528 for i := 0; i < int(labelCount); i++ { 1529 labels = append(labels, c.readI32u()) 1530 } 1531 labels = append(labels, c.readI32u()) // default label. 1532 if state.unreachable { 1533 break 1534 } 1535 1536 index := state.pop() 1537 if labelCount == 0 { // If this br_table is empty, we can just emit the unconditional jump. 1538 targetBlk, argNum := state.brTargetArgNumFor(labels[0]) 1539 args := c.loweringState.nPeekDup(argNum) 1540 c.insertJumpToBlock(args, targetBlk) 1541 } else { 1542 c.lowerBrTable(labels, index) 1543 } 1544 state.unreachable = true 1545 1546 case wasm.OpcodeNop: 1547 case wasm.OpcodeReturn: 1548 if state.unreachable { 1549 break 1550 } 1551 if c.needListener { 1552 c.callListenerAfter() 1553 } 1554 1555 results := c.loweringState.nPeekDup(c.results()) 1556 instr := builder.AllocateInstruction() 1557 1558 instr.AsReturn(results) 1559 builder.InsertInstruction(instr) 1560 state.unreachable = true 1561 1562 case wasm.OpcodeUnreachable: 1563 if state.unreachable { 1564 break 1565 } 1566 exit := builder.AllocateInstruction() 1567 exit.AsExitWithCode(c.execCtxPtrValue, wazevoapi.ExitCodeUnreachable) 1568 builder.InsertInstruction(exit) 1569 state.unreachable = true 1570 1571 case wasm.OpcodeCallIndirect: 1572 typeIndex := c.readI32u() 1573 tableIndex := c.readI32u() 1574 if state.unreachable { 1575 break 1576 } 1577 
c.lowerCallIndirect(typeIndex, tableIndex) 1578 1579 case wasm.OpcodeCall: 1580 fnIndex := c.readI32u() 1581 if state.unreachable { 1582 break 1583 } 1584 1585 // Before transfer the control to the callee, we have to store the current module's moduleContextPtr 1586 // into execContext.callerModuleContextPtr in case when the callee is a Go function. 1587 // 1588 // TODO: maybe this can be optimized out if this is in-module function calls. Investigate later. 1589 c.storeCallerModuleContext() 1590 1591 var typIndex wasm.Index 1592 if fnIndex < c.m.ImportFunctionCount { 1593 var fi int 1594 for i := range c.m.ImportSection { 1595 imp := &c.m.ImportSection[i] 1596 if imp.Type == wasm.ExternTypeFunc { 1597 if fi == int(fnIndex) { 1598 typIndex = imp.DescFunc 1599 break 1600 } 1601 fi++ 1602 } 1603 } 1604 } else { 1605 typIndex = c.m.FunctionSection[fnIndex-c.m.ImportFunctionCount] 1606 } 1607 typ := &c.m.TypeSection[typIndex] 1608 1609 // TODO: reuse slice? 1610 argN := len(typ.Params) 1611 args := make([]ssa.Value, argN+2) 1612 args[0] = c.execCtxPtrValue 1613 state.nPopInto(argN, args[2:]) 1614 1615 sig := c.signatures[typ] 1616 call := builder.AllocateInstruction() 1617 if fnIndex >= c.m.ImportFunctionCount { 1618 args[1] = c.moduleCtxPtrValue // This case the callee module is itself. 1619 call.AsCall(FunctionIndexToFuncRef(fnIndex), sig, args) 1620 builder.InsertInstruction(call) 1621 } else { 1622 // This case we have to read the address of the imported function from the module context. 
1623 moduleCtx := c.moduleCtxPtrValue 1624 loadFuncPtr, loadModuleCtxPtr := builder.AllocateInstruction(), builder.AllocateInstruction() 1625 funcPtrOffset, moduleCtxPtrOffset, _ := c.offset.ImportedFunctionOffset(fnIndex) 1626 loadFuncPtr.AsLoad(moduleCtx, funcPtrOffset.U32(), ssa.TypeI64) 1627 loadModuleCtxPtr.AsLoad(moduleCtx, moduleCtxPtrOffset.U32(), ssa.TypeI64) 1628 builder.InsertInstruction(loadFuncPtr) 1629 builder.InsertInstruction(loadModuleCtxPtr) 1630 1631 args[1] = loadModuleCtxPtr.Return() // This case the callee module is itself. 1632 1633 call.AsCallIndirect(loadFuncPtr.Return(), sig, args) 1634 builder.InsertInstruction(call) 1635 } 1636 1637 first, rest := call.Returns() 1638 if first.Valid() { 1639 state.push(first) 1640 } 1641 for _, v := range rest { 1642 state.push(v) 1643 } 1644 1645 c.reloadAfterCall() 1646 1647 case wasm.OpcodeDrop: 1648 if state.unreachable { 1649 break 1650 } 1651 _ = state.pop() 1652 case wasm.OpcodeF64ConvertI32S, wasm.OpcodeF64ConvertI64S, wasm.OpcodeF64ConvertI32U, wasm.OpcodeF64ConvertI64U: 1653 if state.unreachable { 1654 break 1655 } 1656 result := builder.AllocateInstruction().AsFcvtFromInt( 1657 state.pop(), 1658 op == wasm.OpcodeF64ConvertI32S || op == wasm.OpcodeF64ConvertI64S, 1659 true, 1660 ).Insert(builder).Return() 1661 state.push(result) 1662 case wasm.OpcodeF32ConvertI32S, wasm.OpcodeF32ConvertI64S, wasm.OpcodeF32ConvertI32U, wasm.OpcodeF32ConvertI64U: 1663 if state.unreachable { 1664 break 1665 } 1666 result := builder.AllocateInstruction().AsFcvtFromInt( 1667 state.pop(), 1668 op == wasm.OpcodeF32ConvertI32S || op == wasm.OpcodeF32ConvertI64S, 1669 false, 1670 ).Insert(builder).Return() 1671 state.push(result) 1672 case wasm.OpcodeF32DemoteF64: 1673 if state.unreachable { 1674 break 1675 } 1676 cvt := builder.AllocateInstruction() 1677 cvt.AsFdemote(state.pop()) 1678 builder.InsertInstruction(cvt) 1679 state.push(cvt.Return()) 1680 case wasm.OpcodeF64PromoteF32: 1681 if state.unreachable { 1682 break 
1683 } 1684 cvt := builder.AllocateInstruction() 1685 cvt.AsFpromote(state.pop()) 1686 builder.InsertInstruction(cvt) 1687 state.push(cvt.Return()) 1688 1689 case wasm.OpcodeVecPrefix: 1690 state.pc++ 1691 vecOp := c.wasmFunctionBody[state.pc] 1692 switch vecOp { 1693 case wasm.OpcodeVecV128Const: 1694 state.pc++ 1695 lo := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1696 state.pc += 8 1697 hi := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1698 state.pc += 7 1699 if state.unreachable { 1700 break 1701 } 1702 ret := builder.AllocateInstruction().AsVconst(lo, hi).Insert(builder).Return() 1703 state.push(ret) 1704 case wasm.OpcodeVecV128Load: 1705 _, offset := c.readMemArg() 1706 if state.unreachable { 1707 break 1708 } 1709 baseAddr := state.pop() 1710 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1711 load := builder.AllocateInstruction() 1712 load.AsLoad(addr, offset, ssa.TypeV128) 1713 builder.InsertInstruction(load) 1714 state.push(load.Return()) 1715 case wasm.OpcodeVecV128Load8Lane, wasm.OpcodeVecV128Load16Lane, wasm.OpcodeVecV128Load32Lane: 1716 _, offset := c.readMemArg() 1717 state.pc++ 1718 if state.unreachable { 1719 break 1720 } 1721 var lane ssa.VecLane 1722 var loadOp ssa.Opcode 1723 var opSize uint64 1724 switch vecOp { 1725 case wasm.OpcodeVecV128Load8Lane: 1726 loadOp, lane, opSize = ssa.OpcodeUload8, ssa.VecLaneI8x16, 1 1727 case wasm.OpcodeVecV128Load16Lane: 1728 loadOp, lane, opSize = ssa.OpcodeUload16, ssa.VecLaneI16x8, 2 1729 case wasm.OpcodeVecV128Load32Lane: 1730 loadOp, lane, opSize = ssa.OpcodeUload32, ssa.VecLaneI32x4, 4 1731 } 1732 laneIndex := c.wasmFunctionBody[state.pc] 1733 vector := state.pop() 1734 baseAddr := state.pop() 1735 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1736 load := builder.AllocateInstruction(). 1737 AsExtLoad(loadOp, addr, offset, false). 1738 Insert(builder).Return() 1739 ret := builder.AllocateInstruction(). 1740 AsInsertlane(vector, load, laneIndex, lane). 
1741 Insert(builder).Return() 1742 state.push(ret) 1743 case wasm.OpcodeVecV128Load64Lane: 1744 _, offset := c.readMemArg() 1745 state.pc++ 1746 if state.unreachable { 1747 break 1748 } 1749 laneIndex := c.wasmFunctionBody[state.pc] 1750 vector := state.pop() 1751 baseAddr := state.pop() 1752 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1753 load := builder.AllocateInstruction(). 1754 AsLoad(addr, offset, ssa.TypeI64). 1755 Insert(builder).Return() 1756 ret := builder.AllocateInstruction(). 1757 AsInsertlane(vector, load, laneIndex, ssa.VecLaneI64x2). 1758 Insert(builder).Return() 1759 state.push(ret) 1760 1761 case wasm.OpcodeVecV128Load32zero, wasm.OpcodeVecV128Load64zero: 1762 _, offset := c.readMemArg() 1763 if state.unreachable { 1764 break 1765 } 1766 1767 var scalarType ssa.Type 1768 switch vecOp { 1769 case wasm.OpcodeVecV128Load32zero: 1770 scalarType = ssa.TypeF32 1771 case wasm.OpcodeVecV128Load64zero: 1772 scalarType = ssa.TypeF64 1773 } 1774 1775 baseAddr := state.pop() 1776 addr := c.memOpSetup(baseAddr, uint64(offset), uint64(scalarType.Size())) 1777 1778 ret := builder.AllocateInstruction(). 1779 AsVZeroExtLoad(addr, offset, scalarType). 
1780 Insert(builder).Return() 1781 state.push(ret) 1782 1783 case wasm.OpcodeVecV128Load8x8u, wasm.OpcodeVecV128Load8x8s, 1784 wasm.OpcodeVecV128Load16x4u, wasm.OpcodeVecV128Load16x4s, 1785 wasm.OpcodeVecV128Load32x2u, wasm.OpcodeVecV128Load32x2s: 1786 _, offset := c.readMemArg() 1787 if state.unreachable { 1788 break 1789 } 1790 var lane ssa.VecLane 1791 var signed bool 1792 switch vecOp { 1793 case wasm.OpcodeVecV128Load8x8s: 1794 signed = true 1795 fallthrough 1796 case wasm.OpcodeVecV128Load8x8u: 1797 lane = ssa.VecLaneI8x16 1798 case wasm.OpcodeVecV128Load16x4s: 1799 signed = true 1800 fallthrough 1801 case wasm.OpcodeVecV128Load16x4u: 1802 lane = ssa.VecLaneI16x8 1803 case wasm.OpcodeVecV128Load32x2s: 1804 signed = true 1805 fallthrough 1806 case wasm.OpcodeVecV128Load32x2u: 1807 lane = ssa.VecLaneI32x4 1808 } 1809 baseAddr := state.pop() 1810 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1811 load := builder.AllocateInstruction(). 1812 AsLoad(addr, offset, ssa.TypeV128). 1813 Insert(builder).Return() 1814 ret := builder.AllocateInstruction(). 1815 AsWiden(load, lane, signed, true). 1816 Insert(builder).Return() 1817 state.push(ret) 1818 case wasm.OpcodeVecV128Load8Splat, wasm.OpcodeVecV128Load16Splat, 1819 wasm.OpcodeVecV128Load32Splat, wasm.OpcodeVecV128Load64Splat: 1820 _, offset := c.readMemArg() 1821 if state.unreachable { 1822 break 1823 } 1824 var lane ssa.VecLane 1825 var opSize uint64 1826 switch vecOp { 1827 case wasm.OpcodeVecV128Load8Splat: 1828 lane, opSize = ssa.VecLaneI8x16, 1 1829 case wasm.OpcodeVecV128Load16Splat: 1830 lane, opSize = ssa.VecLaneI16x8, 2 1831 case wasm.OpcodeVecV128Load32Splat: 1832 lane, opSize = ssa.VecLaneI32x4, 4 1833 case wasm.OpcodeVecV128Load64Splat: 1834 lane, opSize = ssa.VecLaneI64x2, 8 1835 } 1836 baseAddr := state.pop() 1837 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1838 ret := builder.AllocateInstruction(). 1839 AsLoadSplat(addr, offset, lane). 
1840 Insert(builder).Return() 1841 state.push(ret) 1842 case wasm.OpcodeVecV128Store: 1843 _, offset := c.readMemArg() 1844 if state.unreachable { 1845 break 1846 } 1847 value := state.pop() 1848 baseAddr := state.pop() 1849 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1850 builder.AllocateInstruction(). 1851 AsStore(ssa.OpcodeStore, value, addr, offset). 1852 Insert(builder) 1853 case wasm.OpcodeVecV128Store8Lane, wasm.OpcodeVecV128Store16Lane, 1854 wasm.OpcodeVecV128Store32Lane, wasm.OpcodeVecV128Store64Lane: 1855 _, offset := c.readMemArg() 1856 state.pc++ 1857 if state.unreachable { 1858 break 1859 } 1860 laneIndex := c.wasmFunctionBody[state.pc] 1861 var storeOp ssa.Opcode 1862 var lane ssa.VecLane 1863 var opSize uint64 1864 switch vecOp { 1865 case wasm.OpcodeVecV128Store8Lane: 1866 storeOp, lane, opSize = ssa.OpcodeIstore8, ssa.VecLaneI8x16, 1 1867 case wasm.OpcodeVecV128Store16Lane: 1868 storeOp, lane, opSize = ssa.OpcodeIstore16, ssa.VecLaneI16x8, 2 1869 case wasm.OpcodeVecV128Store32Lane: 1870 storeOp, lane, opSize = ssa.OpcodeIstore32, ssa.VecLaneI32x4, 4 1871 case wasm.OpcodeVecV128Store64Lane: 1872 storeOp, lane, opSize = ssa.OpcodeStore, ssa.VecLaneI64x2, 8 1873 } 1874 vector := state.pop() 1875 baseAddr := state.pop() 1876 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1877 value := builder.AllocateInstruction(). 1878 AsExtractlane(vector, laneIndex, lane, false). 1879 Insert(builder).Return() 1880 builder.AllocateInstruction(). 1881 AsStore(storeOp, value, addr, offset). 
1882 Insert(builder) 1883 case wasm.OpcodeVecV128Not: 1884 if state.unreachable { 1885 break 1886 } 1887 v1 := state.pop() 1888 ret := builder.AllocateInstruction().AsVbnot(v1).Insert(builder).Return() 1889 state.push(ret) 1890 case wasm.OpcodeVecV128And: 1891 if state.unreachable { 1892 break 1893 } 1894 v2 := state.pop() 1895 v1 := state.pop() 1896 ret := builder.AllocateInstruction().AsVband(v1, v2).Insert(builder).Return() 1897 state.push(ret) 1898 case wasm.OpcodeVecV128AndNot: 1899 if state.unreachable { 1900 break 1901 } 1902 v2 := state.pop() 1903 v1 := state.pop() 1904 ret := builder.AllocateInstruction().AsVbandnot(v1, v2).Insert(builder).Return() 1905 state.push(ret) 1906 case wasm.OpcodeVecV128Or: 1907 if state.unreachable { 1908 break 1909 } 1910 v2 := state.pop() 1911 v1 := state.pop() 1912 ret := builder.AllocateInstruction().AsVbor(v1, v2).Insert(builder).Return() 1913 state.push(ret) 1914 case wasm.OpcodeVecV128Xor: 1915 if state.unreachable { 1916 break 1917 } 1918 v2 := state.pop() 1919 v1 := state.pop() 1920 ret := builder.AllocateInstruction().AsVbxor(v1, v2).Insert(builder).Return() 1921 state.push(ret) 1922 case wasm.OpcodeVecV128Bitselect: 1923 if state.unreachable { 1924 break 1925 } 1926 c := state.pop() 1927 v2 := state.pop() 1928 v1 := state.pop() 1929 ret := builder.AllocateInstruction().AsVbitselect(c, v1, v2).Insert(builder).Return() 1930 state.push(ret) 1931 case wasm.OpcodeVecV128AnyTrue: 1932 if state.unreachable { 1933 break 1934 } 1935 v1 := state.pop() 1936 ret := builder.AllocateInstruction().AsVanyTrue(v1).Insert(builder).Return() 1937 state.push(ret) 1938 case wasm.OpcodeVecI8x16AllTrue, wasm.OpcodeVecI16x8AllTrue, wasm.OpcodeVecI32x4AllTrue, wasm.OpcodeVecI64x2AllTrue: 1939 if state.unreachable { 1940 break 1941 } 1942 var lane ssa.VecLane 1943 switch vecOp { 1944 case wasm.OpcodeVecI8x16AllTrue: 1945 lane = ssa.VecLaneI8x16 1946 case wasm.OpcodeVecI16x8AllTrue: 1947 lane = ssa.VecLaneI16x8 1948 case 
wasm.OpcodeVecI32x4AllTrue: 1949 lane = ssa.VecLaneI32x4 1950 case wasm.OpcodeVecI64x2AllTrue: 1951 lane = ssa.VecLaneI64x2 1952 } 1953 v1 := state.pop() 1954 ret := builder.AllocateInstruction().AsVallTrue(v1, lane).Insert(builder).Return() 1955 state.push(ret) 1956 case wasm.OpcodeVecI8x16BitMask, wasm.OpcodeVecI16x8BitMask, wasm.OpcodeVecI32x4BitMask, wasm.OpcodeVecI64x2BitMask: 1957 if state.unreachable { 1958 break 1959 } 1960 var lane ssa.VecLane 1961 switch vecOp { 1962 case wasm.OpcodeVecI8x16BitMask: 1963 lane = ssa.VecLaneI8x16 1964 case wasm.OpcodeVecI16x8BitMask: 1965 lane = ssa.VecLaneI16x8 1966 case wasm.OpcodeVecI32x4BitMask: 1967 lane = ssa.VecLaneI32x4 1968 case wasm.OpcodeVecI64x2BitMask: 1969 lane = ssa.VecLaneI64x2 1970 } 1971 v1 := state.pop() 1972 ret := builder.AllocateInstruction().AsVhighBits(v1, lane).Insert(builder).Return() 1973 state.push(ret) 1974 case wasm.OpcodeVecI8x16Abs, wasm.OpcodeVecI16x8Abs, wasm.OpcodeVecI32x4Abs, wasm.OpcodeVecI64x2Abs: 1975 if state.unreachable { 1976 break 1977 } 1978 var lane ssa.VecLane 1979 switch vecOp { 1980 case wasm.OpcodeVecI8x16Abs: 1981 lane = ssa.VecLaneI8x16 1982 case wasm.OpcodeVecI16x8Abs: 1983 lane = ssa.VecLaneI16x8 1984 case wasm.OpcodeVecI32x4Abs: 1985 lane = ssa.VecLaneI32x4 1986 case wasm.OpcodeVecI64x2Abs: 1987 lane = ssa.VecLaneI64x2 1988 } 1989 v1 := state.pop() 1990 ret := builder.AllocateInstruction().AsVIabs(v1, lane).Insert(builder).Return() 1991 state.push(ret) 1992 case wasm.OpcodeVecI8x16Neg, wasm.OpcodeVecI16x8Neg, wasm.OpcodeVecI32x4Neg, wasm.OpcodeVecI64x2Neg: 1993 if state.unreachable { 1994 break 1995 } 1996 var lane ssa.VecLane 1997 switch vecOp { 1998 case wasm.OpcodeVecI8x16Neg: 1999 lane = ssa.VecLaneI8x16 2000 case wasm.OpcodeVecI16x8Neg: 2001 lane = ssa.VecLaneI16x8 2002 case wasm.OpcodeVecI32x4Neg: 2003 lane = ssa.VecLaneI32x4 2004 case wasm.OpcodeVecI64x2Neg: 2005 lane = ssa.VecLaneI64x2 2006 } 2007 v1 := state.pop() 2008 ret := 
builder.AllocateInstruction().AsVIneg(v1, lane).Insert(builder).Return() 2009 state.push(ret) 2010 case wasm.OpcodeVecI8x16Popcnt: 2011 if state.unreachable { 2012 break 2013 } 2014 lane := ssa.VecLaneI8x16 2015 v1 := state.pop() 2016 2017 ret := builder.AllocateInstruction().AsVIpopcnt(v1, lane).Insert(builder).Return() 2018 state.push(ret) 2019 case wasm.OpcodeVecI8x16Add, wasm.OpcodeVecI16x8Add, wasm.OpcodeVecI32x4Add, wasm.OpcodeVecI64x2Add: 2020 if state.unreachable { 2021 break 2022 } 2023 var lane ssa.VecLane 2024 switch vecOp { 2025 case wasm.OpcodeVecI8x16Add: 2026 lane = ssa.VecLaneI8x16 2027 case wasm.OpcodeVecI16x8Add: 2028 lane = ssa.VecLaneI16x8 2029 case wasm.OpcodeVecI32x4Add: 2030 lane = ssa.VecLaneI32x4 2031 case wasm.OpcodeVecI64x2Add: 2032 lane = ssa.VecLaneI64x2 2033 } 2034 v2 := state.pop() 2035 v1 := state.pop() 2036 ret := builder.AllocateInstruction().AsVIadd(v1, v2, lane).Insert(builder).Return() 2037 state.push(ret) 2038 case wasm.OpcodeVecI8x16AddSatS, wasm.OpcodeVecI16x8AddSatS: 2039 if state.unreachable { 2040 break 2041 } 2042 var lane ssa.VecLane 2043 switch vecOp { 2044 case wasm.OpcodeVecI8x16AddSatS: 2045 lane = ssa.VecLaneI8x16 2046 case wasm.OpcodeVecI16x8AddSatS: 2047 lane = ssa.VecLaneI16x8 2048 } 2049 v2 := state.pop() 2050 v1 := state.pop() 2051 ret := builder.AllocateInstruction().AsVSaddSat(v1, v2, lane).Insert(builder).Return() 2052 state.push(ret) 2053 case wasm.OpcodeVecI8x16AddSatU, wasm.OpcodeVecI16x8AddSatU: 2054 if state.unreachable { 2055 break 2056 } 2057 var lane ssa.VecLane 2058 switch vecOp { 2059 case wasm.OpcodeVecI8x16AddSatU: 2060 lane = ssa.VecLaneI8x16 2061 case wasm.OpcodeVecI16x8AddSatU: 2062 lane = ssa.VecLaneI16x8 2063 } 2064 v2 := state.pop() 2065 v1 := state.pop() 2066 ret := builder.AllocateInstruction().AsVUaddSat(v1, v2, lane).Insert(builder).Return() 2067 state.push(ret) 2068 case wasm.OpcodeVecI8x16SubSatS, wasm.OpcodeVecI16x8SubSatS: 2069 if state.unreachable { 2070 break 2071 } 2072 var lane 
ssa.VecLane 2073 switch vecOp { 2074 case wasm.OpcodeVecI8x16SubSatS: 2075 lane = ssa.VecLaneI8x16 2076 case wasm.OpcodeVecI16x8SubSatS: 2077 lane = ssa.VecLaneI16x8 2078 } 2079 v2 := state.pop() 2080 v1 := state.pop() 2081 ret := builder.AllocateInstruction().AsVSsubSat(v1, v2, lane).Insert(builder).Return() 2082 state.push(ret) 2083 case wasm.OpcodeVecI8x16SubSatU, wasm.OpcodeVecI16x8SubSatU: 2084 if state.unreachable { 2085 break 2086 } 2087 var lane ssa.VecLane 2088 switch vecOp { 2089 case wasm.OpcodeVecI8x16SubSatU: 2090 lane = ssa.VecLaneI8x16 2091 case wasm.OpcodeVecI16x8SubSatU: 2092 lane = ssa.VecLaneI16x8 2093 } 2094 v2 := state.pop() 2095 v1 := state.pop() 2096 ret := builder.AllocateInstruction().AsVUsubSat(v1, v2, lane).Insert(builder).Return() 2097 state.push(ret) 2098 2099 case wasm.OpcodeVecI8x16Sub, wasm.OpcodeVecI16x8Sub, wasm.OpcodeVecI32x4Sub, wasm.OpcodeVecI64x2Sub: 2100 if state.unreachable { 2101 break 2102 } 2103 var lane ssa.VecLane 2104 switch vecOp { 2105 case wasm.OpcodeVecI8x16Sub: 2106 lane = ssa.VecLaneI8x16 2107 case wasm.OpcodeVecI16x8Sub: 2108 lane = ssa.VecLaneI16x8 2109 case wasm.OpcodeVecI32x4Sub: 2110 lane = ssa.VecLaneI32x4 2111 case wasm.OpcodeVecI64x2Sub: 2112 lane = ssa.VecLaneI64x2 2113 } 2114 v2 := state.pop() 2115 v1 := state.pop() 2116 ret := builder.AllocateInstruction().AsVIsub(v1, v2, lane).Insert(builder).Return() 2117 state.push(ret) 2118 case wasm.OpcodeVecI8x16MinS, wasm.OpcodeVecI16x8MinS, wasm.OpcodeVecI32x4MinS: 2119 if state.unreachable { 2120 break 2121 } 2122 var lane ssa.VecLane 2123 switch vecOp { 2124 case wasm.OpcodeVecI8x16MinS: 2125 lane = ssa.VecLaneI8x16 2126 case wasm.OpcodeVecI16x8MinS: 2127 lane = ssa.VecLaneI16x8 2128 case wasm.OpcodeVecI32x4MinS: 2129 lane = ssa.VecLaneI32x4 2130 } 2131 v2 := state.pop() 2132 v1 := state.pop() 2133 ret := builder.AllocateInstruction().AsVImin(v1, v2, lane).Insert(builder).Return() 2134 state.push(ret) 2135 case wasm.OpcodeVecI8x16MinU, wasm.OpcodeVecI16x8MinU, 
wasm.OpcodeVecI32x4MinU: 2136 if state.unreachable { 2137 break 2138 } 2139 var lane ssa.VecLane 2140 switch vecOp { 2141 case wasm.OpcodeVecI8x16MinU: 2142 lane = ssa.VecLaneI8x16 2143 case wasm.OpcodeVecI16x8MinU: 2144 lane = ssa.VecLaneI16x8 2145 case wasm.OpcodeVecI32x4MinU: 2146 lane = ssa.VecLaneI32x4 2147 } 2148 v2 := state.pop() 2149 v1 := state.pop() 2150 ret := builder.AllocateInstruction().AsVUmin(v1, v2, lane).Insert(builder).Return() 2151 state.push(ret) 2152 case wasm.OpcodeVecI8x16MaxS, wasm.OpcodeVecI16x8MaxS, wasm.OpcodeVecI32x4MaxS: 2153 if state.unreachable { 2154 break 2155 } 2156 var lane ssa.VecLane 2157 switch vecOp { 2158 case wasm.OpcodeVecI8x16MaxS: 2159 lane = ssa.VecLaneI8x16 2160 case wasm.OpcodeVecI16x8MaxS: 2161 lane = ssa.VecLaneI16x8 2162 case wasm.OpcodeVecI32x4MaxS: 2163 lane = ssa.VecLaneI32x4 2164 } 2165 v2 := state.pop() 2166 v1 := state.pop() 2167 ret := builder.AllocateInstruction().AsVImax(v1, v2, lane).Insert(builder).Return() 2168 state.push(ret) 2169 case wasm.OpcodeVecI8x16MaxU, wasm.OpcodeVecI16x8MaxU, wasm.OpcodeVecI32x4MaxU: 2170 if state.unreachable { 2171 break 2172 } 2173 var lane ssa.VecLane 2174 switch vecOp { 2175 case wasm.OpcodeVecI8x16MaxU: 2176 lane = ssa.VecLaneI8x16 2177 case wasm.OpcodeVecI16x8MaxU: 2178 lane = ssa.VecLaneI16x8 2179 case wasm.OpcodeVecI32x4MaxU: 2180 lane = ssa.VecLaneI32x4 2181 } 2182 v2 := state.pop() 2183 v1 := state.pop() 2184 ret := builder.AllocateInstruction().AsVUmax(v1, v2, lane).Insert(builder).Return() 2185 state.push(ret) 2186 case wasm.OpcodeVecI8x16AvgrU, wasm.OpcodeVecI16x8AvgrU: 2187 if state.unreachable { 2188 break 2189 } 2190 var lane ssa.VecLane 2191 switch vecOp { 2192 case wasm.OpcodeVecI8x16AvgrU: 2193 lane = ssa.VecLaneI8x16 2194 case wasm.OpcodeVecI16x8AvgrU: 2195 lane = ssa.VecLaneI16x8 2196 } 2197 v2 := state.pop() 2198 v1 := state.pop() 2199 ret := builder.AllocateInstruction().AsVAvgRound(v1, v2, lane).Insert(builder).Return() 2200 state.push(ret) 2201 case 
wasm.OpcodeVecI16x8Mul, wasm.OpcodeVecI32x4Mul, wasm.OpcodeVecI64x2Mul: 2202 if state.unreachable { 2203 break 2204 } 2205 var lane ssa.VecLane 2206 switch vecOp { 2207 case wasm.OpcodeVecI16x8Mul: 2208 lane = ssa.VecLaneI16x8 2209 case wasm.OpcodeVecI32x4Mul: 2210 lane = ssa.VecLaneI32x4 2211 case wasm.OpcodeVecI64x2Mul: 2212 lane = ssa.VecLaneI64x2 2213 } 2214 v2 := state.pop() 2215 v1 := state.pop() 2216 ret := builder.AllocateInstruction().AsVImul(v1, v2, lane).Insert(builder).Return() 2217 state.push(ret) 2218 case wasm.OpcodeVecI16x8Q15mulrSatS: 2219 if state.unreachable { 2220 break 2221 } 2222 v2 := state.pop() 2223 v1 := state.pop() 2224 ret := builder.AllocateInstruction().AsSqmulRoundSat(v1, v2, ssa.VecLaneI16x8).Insert(builder).Return() 2225 state.push(ret) 2226 case wasm.OpcodeVecI8x16Eq, wasm.OpcodeVecI16x8Eq, wasm.OpcodeVecI32x4Eq, wasm.OpcodeVecI64x2Eq: 2227 if state.unreachable { 2228 break 2229 } 2230 var lane ssa.VecLane 2231 switch vecOp { 2232 case wasm.OpcodeVecI8x16Eq: 2233 lane = ssa.VecLaneI8x16 2234 case wasm.OpcodeVecI16x8Eq: 2235 lane = ssa.VecLaneI16x8 2236 case wasm.OpcodeVecI32x4Eq: 2237 lane = ssa.VecLaneI32x4 2238 case wasm.OpcodeVecI64x2Eq: 2239 lane = ssa.VecLaneI64x2 2240 } 2241 v2 := state.pop() 2242 v1 := state.pop() 2243 ret := builder.AllocateInstruction(). 2244 AsVIcmp(v1, v2, ssa.IntegerCmpCondEqual, lane).Insert(builder).Return() 2245 state.push(ret) 2246 case wasm.OpcodeVecI8x16Ne, wasm.OpcodeVecI16x8Ne, wasm.OpcodeVecI32x4Ne, wasm.OpcodeVecI64x2Ne: 2247 if state.unreachable { 2248 break 2249 } 2250 var lane ssa.VecLane 2251 switch vecOp { 2252 case wasm.OpcodeVecI8x16Ne: 2253 lane = ssa.VecLaneI8x16 2254 case wasm.OpcodeVecI16x8Ne: 2255 lane = ssa.VecLaneI16x8 2256 case wasm.OpcodeVecI32x4Ne: 2257 lane = ssa.VecLaneI32x4 2258 case wasm.OpcodeVecI64x2Ne: 2259 lane = ssa.VecLaneI64x2 2260 } 2261 v2 := state.pop() 2262 v1 := state.pop() 2263 ret := builder.AllocateInstruction(). 
2264 AsVIcmp(v1, v2, ssa.IntegerCmpCondNotEqual, lane).Insert(builder).Return() 2265 state.push(ret) 2266 case wasm.OpcodeVecI8x16LtS, wasm.OpcodeVecI16x8LtS, wasm.OpcodeVecI32x4LtS, wasm.OpcodeVecI64x2LtS: 2267 if state.unreachable { 2268 break 2269 } 2270 var lane ssa.VecLane 2271 switch vecOp { 2272 case wasm.OpcodeVecI8x16LtS: 2273 lane = ssa.VecLaneI8x16 2274 case wasm.OpcodeVecI16x8LtS: 2275 lane = ssa.VecLaneI16x8 2276 case wasm.OpcodeVecI32x4LtS: 2277 lane = ssa.VecLaneI32x4 2278 case wasm.OpcodeVecI64x2LtS: 2279 lane = ssa.VecLaneI64x2 2280 } 2281 v2 := state.pop() 2282 v1 := state.pop() 2283 ret := builder.AllocateInstruction(). 2284 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThan, lane).Insert(builder).Return() 2285 state.push(ret) 2286 case wasm.OpcodeVecI8x16LtU, wasm.OpcodeVecI16x8LtU, wasm.OpcodeVecI32x4LtU: 2287 if state.unreachable { 2288 break 2289 } 2290 var lane ssa.VecLane 2291 switch vecOp { 2292 case wasm.OpcodeVecI8x16LtU: 2293 lane = ssa.VecLaneI8x16 2294 case wasm.OpcodeVecI16x8LtU: 2295 lane = ssa.VecLaneI16x8 2296 case wasm.OpcodeVecI32x4LtU: 2297 lane = ssa.VecLaneI32x4 2298 } 2299 v2 := state.pop() 2300 v1 := state.pop() 2301 ret := builder.AllocateInstruction(). 2302 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThan, lane).Insert(builder).Return() 2303 state.push(ret) 2304 case wasm.OpcodeVecI8x16LeS, wasm.OpcodeVecI16x8LeS, wasm.OpcodeVecI32x4LeS, wasm.OpcodeVecI64x2LeS: 2305 if state.unreachable { 2306 break 2307 } 2308 var lane ssa.VecLane 2309 switch vecOp { 2310 case wasm.OpcodeVecI8x16LeS: 2311 lane = ssa.VecLaneI8x16 2312 case wasm.OpcodeVecI16x8LeS: 2313 lane = ssa.VecLaneI16x8 2314 case wasm.OpcodeVecI32x4LeS: 2315 lane = ssa.VecLaneI32x4 2316 case wasm.OpcodeVecI64x2LeS: 2317 lane = ssa.VecLaneI64x2 2318 } 2319 v2 := state.pop() 2320 v1 := state.pop() 2321 ret := builder.AllocateInstruction(). 
2322 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThanOrEqual, lane).Insert(builder).Return() 2323 state.push(ret) 2324 case wasm.OpcodeVecI8x16LeU, wasm.OpcodeVecI16x8LeU, wasm.OpcodeVecI32x4LeU: 2325 if state.unreachable { 2326 break 2327 } 2328 var lane ssa.VecLane 2329 switch vecOp { 2330 case wasm.OpcodeVecI8x16LeU: 2331 lane = ssa.VecLaneI8x16 2332 case wasm.OpcodeVecI16x8LeU: 2333 lane = ssa.VecLaneI16x8 2334 case wasm.OpcodeVecI32x4LeU: 2335 lane = ssa.VecLaneI32x4 2336 } 2337 v2 := state.pop() 2338 v1 := state.pop() 2339 ret := builder.AllocateInstruction(). 2340 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThanOrEqual, lane).Insert(builder).Return() 2341 state.push(ret) 2342 case wasm.OpcodeVecI8x16GtS, wasm.OpcodeVecI16x8GtS, wasm.OpcodeVecI32x4GtS, wasm.OpcodeVecI64x2GtS: 2343 if state.unreachable { 2344 break 2345 } 2346 var lane ssa.VecLane 2347 switch vecOp { 2348 case wasm.OpcodeVecI8x16GtS: 2349 lane = ssa.VecLaneI8x16 2350 case wasm.OpcodeVecI16x8GtS: 2351 lane = ssa.VecLaneI16x8 2352 case wasm.OpcodeVecI32x4GtS: 2353 lane = ssa.VecLaneI32x4 2354 case wasm.OpcodeVecI64x2GtS: 2355 lane = ssa.VecLaneI64x2 2356 } 2357 v2 := state.pop() 2358 v1 := state.pop() 2359 ret := builder.AllocateInstruction(). 2360 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThan, lane).Insert(builder).Return() 2361 state.push(ret) 2362 case wasm.OpcodeVecI8x16GtU, wasm.OpcodeVecI16x8GtU, wasm.OpcodeVecI32x4GtU: 2363 if state.unreachable { 2364 break 2365 } 2366 var lane ssa.VecLane 2367 switch vecOp { 2368 case wasm.OpcodeVecI8x16GtU: 2369 lane = ssa.VecLaneI8x16 2370 case wasm.OpcodeVecI16x8GtU: 2371 lane = ssa.VecLaneI16x8 2372 case wasm.OpcodeVecI32x4GtU: 2373 lane = ssa.VecLaneI32x4 2374 } 2375 v2 := state.pop() 2376 v1 := state.pop() 2377 ret := builder.AllocateInstruction(). 
2378 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThan, lane).Insert(builder).Return() 2379 state.push(ret) 2380 case wasm.OpcodeVecI8x16GeS, wasm.OpcodeVecI16x8GeS, wasm.OpcodeVecI32x4GeS, wasm.OpcodeVecI64x2GeS: 2381 if state.unreachable { 2382 break 2383 } 2384 var lane ssa.VecLane 2385 switch vecOp { 2386 case wasm.OpcodeVecI8x16GeS: 2387 lane = ssa.VecLaneI8x16 2388 case wasm.OpcodeVecI16x8GeS: 2389 lane = ssa.VecLaneI16x8 2390 case wasm.OpcodeVecI32x4GeS: 2391 lane = ssa.VecLaneI32x4 2392 case wasm.OpcodeVecI64x2GeS: 2393 lane = ssa.VecLaneI64x2 2394 } 2395 v2 := state.pop() 2396 v1 := state.pop() 2397 ret := builder.AllocateInstruction(). 2398 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThanOrEqual, lane).Insert(builder).Return() 2399 state.push(ret) 2400 case wasm.OpcodeVecI8x16GeU, wasm.OpcodeVecI16x8GeU, wasm.OpcodeVecI32x4GeU: 2401 if state.unreachable { 2402 break 2403 } 2404 var lane ssa.VecLane 2405 switch vecOp { 2406 case wasm.OpcodeVecI8x16GeU: 2407 lane = ssa.VecLaneI8x16 2408 case wasm.OpcodeVecI16x8GeU: 2409 lane = ssa.VecLaneI16x8 2410 case wasm.OpcodeVecI32x4GeU: 2411 lane = ssa.VecLaneI32x4 2412 } 2413 v2 := state.pop() 2414 v1 := state.pop() 2415 ret := builder.AllocateInstruction(). 
2416 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual, lane).Insert(builder).Return() 2417 state.push(ret) 2418 case wasm.OpcodeVecF32x4Max, wasm.OpcodeVecF64x2Max: 2419 if state.unreachable { 2420 break 2421 } 2422 var lane ssa.VecLane 2423 switch vecOp { 2424 case wasm.OpcodeVecF32x4Max: 2425 lane = ssa.VecLaneF32x4 2426 case wasm.OpcodeVecF64x2Max: 2427 lane = ssa.VecLaneF64x2 2428 } 2429 v2 := state.pop() 2430 v1 := state.pop() 2431 ret := builder.AllocateInstruction().AsVFmax(v1, v2, lane).Insert(builder).Return() 2432 state.push(ret) 2433 case wasm.OpcodeVecF32x4Abs, wasm.OpcodeVecF64x2Abs: 2434 if state.unreachable { 2435 break 2436 } 2437 var lane ssa.VecLane 2438 switch vecOp { 2439 case wasm.OpcodeVecF32x4Abs: 2440 lane = ssa.VecLaneF32x4 2441 case wasm.OpcodeVecF64x2Abs: 2442 lane = ssa.VecLaneF64x2 2443 } 2444 v1 := state.pop() 2445 ret := builder.AllocateInstruction().AsVFabs(v1, lane).Insert(builder).Return() 2446 state.push(ret) 2447 case wasm.OpcodeVecF32x4Min, wasm.OpcodeVecF64x2Min: 2448 if state.unreachable { 2449 break 2450 } 2451 var lane ssa.VecLane 2452 switch vecOp { 2453 case wasm.OpcodeVecF32x4Min: 2454 lane = ssa.VecLaneF32x4 2455 case wasm.OpcodeVecF64x2Min: 2456 lane = ssa.VecLaneF64x2 2457 } 2458 v2 := state.pop() 2459 v1 := state.pop() 2460 ret := builder.AllocateInstruction().AsVFmin(v1, v2, lane).Insert(builder).Return() 2461 state.push(ret) 2462 case wasm.OpcodeVecF32x4Neg, wasm.OpcodeVecF64x2Neg: 2463 if state.unreachable { 2464 break 2465 } 2466 var lane ssa.VecLane 2467 switch vecOp { 2468 case wasm.OpcodeVecF32x4Neg: 2469 lane = ssa.VecLaneF32x4 2470 case wasm.OpcodeVecF64x2Neg: 2471 lane = ssa.VecLaneF64x2 2472 } 2473 v1 := state.pop() 2474 ret := builder.AllocateInstruction().AsVFneg(v1, lane).Insert(builder).Return() 2475 state.push(ret) 2476 case wasm.OpcodeVecF32x4Sqrt, wasm.OpcodeVecF64x2Sqrt: 2477 if state.unreachable { 2478 break 2479 } 2480 var lane ssa.VecLane 2481 switch vecOp { 2482 case 
wasm.OpcodeVecF32x4Sqrt: 2483 lane = ssa.VecLaneF32x4 2484 case wasm.OpcodeVecF64x2Sqrt: 2485 lane = ssa.VecLaneF64x2 2486 } 2487 v1 := state.pop() 2488 ret := builder.AllocateInstruction().AsVSqrt(v1, lane).Insert(builder).Return() 2489 state.push(ret) 2490 2491 case wasm.OpcodeVecF32x4Add, wasm.OpcodeVecF64x2Add: 2492 if state.unreachable { 2493 break 2494 } 2495 var lane ssa.VecLane 2496 switch vecOp { 2497 case wasm.OpcodeVecF32x4Add: 2498 lane = ssa.VecLaneF32x4 2499 case wasm.OpcodeVecF64x2Add: 2500 lane = ssa.VecLaneF64x2 2501 } 2502 v2 := state.pop() 2503 v1 := state.pop() 2504 ret := builder.AllocateInstruction().AsVFadd(v1, v2, lane).Insert(builder).Return() 2505 state.push(ret) 2506 case wasm.OpcodeVecF32x4Sub, wasm.OpcodeVecF64x2Sub: 2507 if state.unreachable { 2508 break 2509 } 2510 var lane ssa.VecLane 2511 switch vecOp { 2512 case wasm.OpcodeVecF32x4Sub: 2513 lane = ssa.VecLaneF32x4 2514 case wasm.OpcodeVecF64x2Sub: 2515 lane = ssa.VecLaneF64x2 2516 } 2517 v2 := state.pop() 2518 v1 := state.pop() 2519 ret := builder.AllocateInstruction().AsVFsub(v1, v2, lane).Insert(builder).Return() 2520 state.push(ret) 2521 case wasm.OpcodeVecF32x4Mul, wasm.OpcodeVecF64x2Mul: 2522 if state.unreachable { 2523 break 2524 } 2525 var lane ssa.VecLane 2526 switch vecOp { 2527 case wasm.OpcodeVecF32x4Mul: 2528 lane = ssa.VecLaneF32x4 2529 case wasm.OpcodeVecF64x2Mul: 2530 lane = ssa.VecLaneF64x2 2531 } 2532 v2 := state.pop() 2533 v1 := state.pop() 2534 ret := builder.AllocateInstruction().AsVFmul(v1, v2, lane).Insert(builder).Return() 2535 state.push(ret) 2536 case wasm.OpcodeVecF32x4Div, wasm.OpcodeVecF64x2Div: 2537 if state.unreachable { 2538 break 2539 } 2540 var lane ssa.VecLane 2541 switch vecOp { 2542 case wasm.OpcodeVecF32x4Div: 2543 lane = ssa.VecLaneF32x4 2544 case wasm.OpcodeVecF64x2Div: 2545 lane = ssa.VecLaneF64x2 2546 } 2547 v2 := state.pop() 2548 v1 := state.pop() 2549 ret := builder.AllocateInstruction().AsVFdiv(v1, v2, lane).Insert(builder).Return() 2550 
state.push(ret) 2551 2552 case wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S, wasm.OpcodeVecI16x8ExtaddPairwiseI8x16U: 2553 if state.unreachable { 2554 break 2555 } 2556 v1 := state.pop() 2557 // TODO: The sequence `Widen; Widen; IaddPairwise` can be substituted for a single instruction on some ISAs. 2558 signed := vecOp == wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S 2559 vlo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI8x16, signed, true).Insert(builder).Return() 2560 vhi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI8x16, signed, false).Insert(builder).Return() 2561 ret := builder.AllocateInstruction().AsIaddPairwise(vlo, vhi, ssa.VecLaneI16x8).Insert(builder).Return() 2562 state.push(ret) 2563 2564 case wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S, wasm.OpcodeVecI32x4ExtaddPairwiseI16x8U: 2565 if state.unreachable { 2566 break 2567 } 2568 v1 := state.pop() 2569 // TODO: The sequence `Widen; Widen; IaddPairwise` can be substituted for a single instruction on some ISAs. 2570 signed := vecOp == wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S 2571 vlo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, signed, true).Insert(builder).Return() 2572 vhi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, signed, false).Insert(builder).Return() 2573 ret := builder.AllocateInstruction().AsIaddPairwise(vlo, vhi, ssa.VecLaneI32x4).Insert(builder).Return() 2574 state.push(ret) 2575 2576 case wasm.OpcodeVecI16x8ExtMulLowI8x16S, wasm.OpcodeVecI16x8ExtMulLowI8x16U: 2577 if state.unreachable { 2578 break 2579 } 2580 v2 := state.pop() 2581 v1 := state.pop() 2582 ret := c.lowerExtMul( 2583 v1, v2, 2584 ssa.VecLaneI8x16, ssa.VecLaneI16x8, 2585 vecOp == wasm.OpcodeVecI16x8ExtMulLowI8x16S, true) 2586 state.push(ret) 2587 2588 case wasm.OpcodeVecI16x8ExtMulHighI8x16S, wasm.OpcodeVecI16x8ExtMulHighI8x16U: 2589 if state.unreachable { 2590 break 2591 } 2592 v2 := state.pop() 2593 v1 := state.pop() 2594 ret := c.lowerExtMul( 2595 v1, v2, 2596 ssa.VecLaneI8x16, 
ssa.VecLaneI16x8, 2597 vecOp == wasm.OpcodeVecI16x8ExtMulHighI8x16S, false) 2598 state.push(ret) 2599 2600 case wasm.OpcodeVecI32x4ExtMulLowI16x8S, wasm.OpcodeVecI32x4ExtMulLowI16x8U: 2601 if state.unreachable { 2602 break 2603 } 2604 v2 := state.pop() 2605 v1 := state.pop() 2606 ret := c.lowerExtMul( 2607 v1, v2, 2608 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2609 vecOp == wasm.OpcodeVecI32x4ExtMulLowI16x8S, true) 2610 state.push(ret) 2611 2612 case wasm.OpcodeVecI32x4ExtMulHighI16x8S, wasm.OpcodeVecI32x4ExtMulHighI16x8U: 2613 if state.unreachable { 2614 break 2615 } 2616 v2 := state.pop() 2617 v1 := state.pop() 2618 ret := c.lowerExtMul( 2619 v1, v2, 2620 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2621 vecOp == wasm.OpcodeVecI32x4ExtMulHighI16x8S, false) 2622 state.push(ret) 2623 case wasm.OpcodeVecI64x2ExtMulLowI32x4S, wasm.OpcodeVecI64x2ExtMulLowI32x4U: 2624 if state.unreachable { 2625 break 2626 } 2627 v2 := state.pop() 2628 v1 := state.pop() 2629 ret := c.lowerExtMul( 2630 v1, v2, 2631 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2632 vecOp == wasm.OpcodeVecI64x2ExtMulLowI32x4S, true) 2633 state.push(ret) 2634 2635 case wasm.OpcodeVecI64x2ExtMulHighI32x4S, wasm.OpcodeVecI64x2ExtMulHighI32x4U: 2636 if state.unreachable { 2637 break 2638 } 2639 v2 := state.pop() 2640 v1 := state.pop() 2641 ret := c.lowerExtMul( 2642 v1, v2, 2643 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2644 vecOp == wasm.OpcodeVecI64x2ExtMulHighI32x4S, false) 2645 state.push(ret) 2646 2647 case wasm.OpcodeVecI32x4DotI16x8S: 2648 if state.unreachable { 2649 break 2650 } 2651 v2 := state.pop() 2652 v1 := state.pop() 2653 2654 // TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs. 
2655 v1lo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, true, true).Insert(builder).Return() 2656 v2lo := builder.AllocateInstruction().AsWiden(v2, ssa.VecLaneI16x8, true, true).Insert(builder).Return() 2657 low := builder.AllocateInstruction().AsVImul(v1lo, v2lo, ssa.VecLaneI32x4).Insert(builder).Return() 2658 2659 v1hi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, true, false).Insert(builder).Return() 2660 v2hi := builder.AllocateInstruction().AsWiden(v2, ssa.VecLaneI16x8, true, false).Insert(builder).Return() 2661 high := builder.AllocateInstruction().AsVImul(v1hi, v2hi, ssa.VecLaneI32x4).Insert(builder).Return() 2662 2663 ret := builder.AllocateInstruction().AsIaddPairwise(low, high, ssa.VecLaneI32x4).Insert(builder).Return() 2664 state.push(ret) 2665 2666 case wasm.OpcodeVecF32x4Eq, wasm.OpcodeVecF64x2Eq: 2667 if state.unreachable { 2668 break 2669 } 2670 var lane ssa.VecLane 2671 switch vecOp { 2672 case wasm.OpcodeVecF32x4Eq: 2673 lane = ssa.VecLaneF32x4 2674 case wasm.OpcodeVecF64x2Eq: 2675 lane = ssa.VecLaneF64x2 2676 } 2677 v2 := state.pop() 2678 v1 := state.pop() 2679 ret := builder.AllocateInstruction(). 2680 AsVFcmp(v1, v2, ssa.FloatCmpCondEqual, lane).Insert(builder).Return() 2681 state.push(ret) 2682 case wasm.OpcodeVecF32x4Ne, wasm.OpcodeVecF64x2Ne: 2683 if state.unreachable { 2684 break 2685 } 2686 var lane ssa.VecLane 2687 switch vecOp { 2688 case wasm.OpcodeVecF32x4Ne: 2689 lane = ssa.VecLaneF32x4 2690 case wasm.OpcodeVecF64x2Ne: 2691 lane = ssa.VecLaneF64x2 2692 } 2693 v2 := state.pop() 2694 v1 := state.pop() 2695 ret := builder.AllocateInstruction(). 
2696 AsVFcmp(v1, v2, ssa.FloatCmpCondNotEqual, lane).Insert(builder).Return() 2697 state.push(ret) 2698 case wasm.OpcodeVecF32x4Lt, wasm.OpcodeVecF64x2Lt: 2699 if state.unreachable { 2700 break 2701 } 2702 var lane ssa.VecLane 2703 switch vecOp { 2704 case wasm.OpcodeVecF32x4Lt: 2705 lane = ssa.VecLaneF32x4 2706 case wasm.OpcodeVecF64x2Lt: 2707 lane = ssa.VecLaneF64x2 2708 } 2709 v2 := state.pop() 2710 v1 := state.pop() 2711 ret := builder.AllocateInstruction(). 2712 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThan, lane).Insert(builder).Return() 2713 state.push(ret) 2714 case wasm.OpcodeVecF32x4Le, wasm.OpcodeVecF64x2Le: 2715 if state.unreachable { 2716 break 2717 } 2718 var lane ssa.VecLane 2719 switch vecOp { 2720 case wasm.OpcodeVecF32x4Le: 2721 lane = ssa.VecLaneF32x4 2722 case wasm.OpcodeVecF64x2Le: 2723 lane = ssa.VecLaneF64x2 2724 } 2725 v2 := state.pop() 2726 v1 := state.pop() 2727 ret := builder.AllocateInstruction(). 2728 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThanOrEqual, lane).Insert(builder).Return() 2729 state.push(ret) 2730 case wasm.OpcodeVecF32x4Gt, wasm.OpcodeVecF64x2Gt: 2731 if state.unreachable { 2732 break 2733 } 2734 var lane ssa.VecLane 2735 switch vecOp { 2736 case wasm.OpcodeVecF32x4Gt: 2737 lane = ssa.VecLaneF32x4 2738 case wasm.OpcodeVecF64x2Gt: 2739 lane = ssa.VecLaneF64x2 2740 } 2741 v2 := state.pop() 2742 v1 := state.pop() 2743 ret := builder.AllocateInstruction(). 2744 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThan, lane).Insert(builder).Return() 2745 state.push(ret) 2746 case wasm.OpcodeVecF32x4Ge, wasm.OpcodeVecF64x2Ge: 2747 if state.unreachable { 2748 break 2749 } 2750 var lane ssa.VecLane 2751 switch vecOp { 2752 case wasm.OpcodeVecF32x4Ge: 2753 lane = ssa.VecLaneF32x4 2754 case wasm.OpcodeVecF64x2Ge: 2755 lane = ssa.VecLaneF64x2 2756 } 2757 v2 := state.pop() 2758 v1 := state.pop() 2759 ret := builder.AllocateInstruction(). 
2760 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThanOrEqual, lane).Insert(builder).Return() 2761 state.push(ret) 2762 case wasm.OpcodeVecF32x4Ceil, wasm.OpcodeVecF64x2Ceil: 2763 if state.unreachable { 2764 break 2765 } 2766 var lane ssa.VecLane 2767 switch vecOp { 2768 case wasm.OpcodeVecF32x4Ceil: 2769 lane = ssa.VecLaneF32x4 2770 case wasm.OpcodeVecF64x2Ceil: 2771 lane = ssa.VecLaneF64x2 2772 } 2773 v1 := state.pop() 2774 ret := builder.AllocateInstruction().AsVCeil(v1, lane).Insert(builder).Return() 2775 state.push(ret) 2776 case wasm.OpcodeVecF32x4Floor, wasm.OpcodeVecF64x2Floor: 2777 if state.unreachable { 2778 break 2779 } 2780 var lane ssa.VecLane 2781 switch vecOp { 2782 case wasm.OpcodeVecF32x4Floor: 2783 lane = ssa.VecLaneF32x4 2784 case wasm.OpcodeVecF64x2Floor: 2785 lane = ssa.VecLaneF64x2 2786 } 2787 v1 := state.pop() 2788 ret := builder.AllocateInstruction().AsVFloor(v1, lane).Insert(builder).Return() 2789 state.push(ret) 2790 case wasm.OpcodeVecF32x4Trunc, wasm.OpcodeVecF64x2Trunc: 2791 if state.unreachable { 2792 break 2793 } 2794 var lane ssa.VecLane 2795 switch vecOp { 2796 case wasm.OpcodeVecF32x4Trunc: 2797 lane = ssa.VecLaneF32x4 2798 case wasm.OpcodeVecF64x2Trunc: 2799 lane = ssa.VecLaneF64x2 2800 } 2801 v1 := state.pop() 2802 ret := builder.AllocateInstruction().AsVTrunc(v1, lane).Insert(builder).Return() 2803 state.push(ret) 2804 case wasm.OpcodeVecF32x4Nearest, wasm.OpcodeVecF64x2Nearest: 2805 if state.unreachable { 2806 break 2807 } 2808 var lane ssa.VecLane 2809 switch vecOp { 2810 case wasm.OpcodeVecF32x4Nearest: 2811 lane = ssa.VecLaneF32x4 2812 case wasm.OpcodeVecF64x2Nearest: 2813 lane = ssa.VecLaneF64x2 2814 } 2815 v1 := state.pop() 2816 ret := builder.AllocateInstruction().AsVNearest(v1, lane).Insert(builder).Return() 2817 state.push(ret) 2818 case wasm.OpcodeVecF32x4Pmin, wasm.OpcodeVecF64x2Pmin: 2819 if state.unreachable { 2820 break 2821 } 2822 var lane ssa.VecLane 2823 switch vecOp { 2824 case wasm.OpcodeVecF32x4Pmin: 2825 lane = 
ssa.VecLaneF32x4 2826 case wasm.OpcodeVecF64x2Pmin: 2827 lane = ssa.VecLaneF64x2 2828 } 2829 v2 := state.pop() 2830 v1 := state.pop() 2831 ret := builder.AllocateInstruction().AsVMinPseudo(v1, v2, lane).Insert(builder).Return() 2832 state.push(ret) 2833 case wasm.OpcodeVecF32x4Pmax, wasm.OpcodeVecF64x2Pmax: 2834 if state.unreachable { 2835 break 2836 } 2837 var lane ssa.VecLane 2838 switch vecOp { 2839 case wasm.OpcodeVecF32x4Pmax: 2840 lane = ssa.VecLaneF32x4 2841 case wasm.OpcodeVecF64x2Pmax: 2842 lane = ssa.VecLaneF64x2 2843 } 2844 v2 := state.pop() 2845 v1 := state.pop() 2846 ret := builder.AllocateInstruction().AsVMaxPseudo(v1, v2, lane).Insert(builder).Return() 2847 state.push(ret) 2848 case wasm.OpcodeVecI32x4TruncSatF32x4S, wasm.OpcodeVecI32x4TruncSatF32x4U: 2849 if state.unreachable { 2850 break 2851 } 2852 v1 := state.pop() 2853 ret := builder.AllocateInstruction(). 2854 AsVFcvtToIntSat(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecI32x4TruncSatF32x4S).Insert(builder).Return() 2855 state.push(ret) 2856 case wasm.OpcodeVecI32x4TruncSatF64x2SZero, wasm.OpcodeVecI32x4TruncSatF64x2UZero: 2857 if state.unreachable { 2858 break 2859 } 2860 v1 := state.pop() 2861 ret := builder.AllocateInstruction(). 2862 AsVFcvtToIntSat(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecI32x4TruncSatF64x2SZero).Insert(builder).Return() 2863 state.push(ret) 2864 case wasm.OpcodeVecF32x4ConvertI32x4S, wasm.OpcodeVecF32x4ConvertI32x4U: 2865 if state.unreachable { 2866 break 2867 } 2868 v1 := state.pop() 2869 ret := builder.AllocateInstruction(). 2870 AsVFcvtFromInt(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecF32x4ConvertI32x4S).Insert(builder).Return() 2871 state.push(ret) 2872 case wasm.OpcodeVecF64x2ConvertLowI32x4S, wasm.OpcodeVecF64x2ConvertLowI32x4U: 2873 if state.unreachable { 2874 break 2875 } 2876 v1 := state.pop() 2877 v1w := builder.AllocateInstruction(). 
2878 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S, true).Insert(builder).Return() 2879 ret := builder.AllocateInstruction(). 2880 AsVFcvtFromInt(v1w, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S). 2881 Insert(builder).Return() 2882 state.push(ret) 2883 case wasm.OpcodeVecI8x16NarrowI16x8S, wasm.OpcodeVecI8x16NarrowI16x8U: 2884 if state.unreachable { 2885 break 2886 } 2887 v2 := state.pop() 2888 v1 := state.pop() 2889 ret := builder.AllocateInstruction(). 2890 AsNarrow(v1, v2, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI8x16NarrowI16x8S). 2891 Insert(builder).Return() 2892 state.push(ret) 2893 case wasm.OpcodeVecI16x8NarrowI32x4S, wasm.OpcodeVecI16x8NarrowI32x4U: 2894 if state.unreachable { 2895 break 2896 } 2897 v2 := state.pop() 2898 v1 := state.pop() 2899 ret := builder.AllocateInstruction(). 2900 AsNarrow(v1, v2, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI16x8NarrowI32x4S). 2901 Insert(builder).Return() 2902 state.push(ret) 2903 case wasm.OpcodeVecI16x8ExtendLowI8x16S, wasm.OpcodeVecI16x8ExtendLowI8x16U: 2904 if state.unreachable { 2905 break 2906 } 2907 v1 := state.pop() 2908 ret := builder.AllocateInstruction(). 2909 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendLowI8x16S, true). 2910 Insert(builder).Return() 2911 state.push(ret) 2912 case wasm.OpcodeVecI16x8ExtendHighI8x16S, wasm.OpcodeVecI16x8ExtendHighI8x16U: 2913 if state.unreachable { 2914 break 2915 } 2916 v1 := state.pop() 2917 ret := builder.AllocateInstruction(). 2918 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendHighI8x16S, false). 2919 Insert(builder).Return() 2920 state.push(ret) 2921 case wasm.OpcodeVecI32x4ExtendLowI16x8S, wasm.OpcodeVecI32x4ExtendLowI16x8U: 2922 if state.unreachable { 2923 break 2924 } 2925 v1 := state.pop() 2926 ret := builder.AllocateInstruction(). 2927 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendLowI16x8S, true). 
2928 Insert(builder).Return() 2929 state.push(ret) 2930 case wasm.OpcodeVecI32x4ExtendHighI16x8S, wasm.OpcodeVecI32x4ExtendHighI16x8U: 2931 if state.unreachable { 2932 break 2933 } 2934 v1 := state.pop() 2935 ret := builder.AllocateInstruction(). 2936 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendHighI16x8S, false). 2937 Insert(builder).Return() 2938 state.push(ret) 2939 case wasm.OpcodeVecI64x2ExtendLowI32x4S, wasm.OpcodeVecI64x2ExtendLowI32x4U: 2940 if state.unreachable { 2941 break 2942 } 2943 v1 := state.pop() 2944 ret := builder.AllocateInstruction(). 2945 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendLowI32x4S, true). 2946 Insert(builder).Return() 2947 state.push(ret) 2948 case wasm.OpcodeVecI64x2ExtendHighI32x4S, wasm.OpcodeVecI64x2ExtendHighI32x4U: 2949 if state.unreachable { 2950 break 2951 } 2952 v1 := state.pop() 2953 ret := builder.AllocateInstruction(). 2954 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendHighI32x4S, false). 2955 Insert(builder).Return() 2956 state.push(ret) 2957 2958 case wasm.OpcodeVecF64x2PromoteLowF32x4Zero: 2959 if state.unreachable { 2960 break 2961 } 2962 v1 := state.pop() 2963 ret := builder.AllocateInstruction(). 2964 AsFvpromoteLow(v1, ssa.VecLaneF32x4). 2965 Insert(builder).Return() 2966 state.push(ret) 2967 case wasm.OpcodeVecF32x4DemoteF64x2Zero: 2968 if state.unreachable { 2969 break 2970 } 2971 v1 := state.pop() 2972 ret := builder.AllocateInstruction(). 2973 AsFvdemote(v1, ssa.VecLaneF64x2). 
2974 Insert(builder).Return() 2975 state.push(ret) 2976 case wasm.OpcodeVecI8x16Shl, wasm.OpcodeVecI16x8Shl, wasm.OpcodeVecI32x4Shl, wasm.OpcodeVecI64x2Shl: 2977 if state.unreachable { 2978 break 2979 } 2980 var lane ssa.VecLane 2981 switch vecOp { 2982 case wasm.OpcodeVecI8x16Shl: 2983 lane = ssa.VecLaneI8x16 2984 case wasm.OpcodeVecI16x8Shl: 2985 lane = ssa.VecLaneI16x8 2986 case wasm.OpcodeVecI32x4Shl: 2987 lane = ssa.VecLaneI32x4 2988 case wasm.OpcodeVecI64x2Shl: 2989 lane = ssa.VecLaneI64x2 2990 } 2991 v2 := state.pop() 2992 v1 := state.pop() 2993 ret := builder.AllocateInstruction().AsVIshl(v1, v2, lane).Insert(builder).Return() 2994 state.push(ret) 2995 case wasm.OpcodeVecI8x16ShrS, wasm.OpcodeVecI16x8ShrS, wasm.OpcodeVecI32x4ShrS, wasm.OpcodeVecI64x2ShrS: 2996 if state.unreachable { 2997 break 2998 } 2999 var lane ssa.VecLane 3000 switch vecOp { 3001 case wasm.OpcodeVecI8x16ShrS: 3002 lane = ssa.VecLaneI8x16 3003 case wasm.OpcodeVecI16x8ShrS: 3004 lane = ssa.VecLaneI16x8 3005 case wasm.OpcodeVecI32x4ShrS: 3006 lane = ssa.VecLaneI32x4 3007 case wasm.OpcodeVecI64x2ShrS: 3008 lane = ssa.VecLaneI64x2 3009 } 3010 v2 := state.pop() 3011 v1 := state.pop() 3012 ret := builder.AllocateInstruction().AsVSshr(v1, v2, lane).Insert(builder).Return() 3013 state.push(ret) 3014 case wasm.OpcodeVecI8x16ShrU, wasm.OpcodeVecI16x8ShrU, wasm.OpcodeVecI32x4ShrU, wasm.OpcodeVecI64x2ShrU: 3015 if state.unreachable { 3016 break 3017 } 3018 var lane ssa.VecLane 3019 switch vecOp { 3020 case wasm.OpcodeVecI8x16ShrU: 3021 lane = ssa.VecLaneI8x16 3022 case wasm.OpcodeVecI16x8ShrU: 3023 lane = ssa.VecLaneI16x8 3024 case wasm.OpcodeVecI32x4ShrU: 3025 lane = ssa.VecLaneI32x4 3026 case wasm.OpcodeVecI64x2ShrU: 3027 lane = ssa.VecLaneI64x2 3028 } 3029 v2 := state.pop() 3030 v1 := state.pop() 3031 ret := builder.AllocateInstruction().AsVUshr(v1, v2, lane).Insert(builder).Return() 3032 state.push(ret) 3033 case wasm.OpcodeVecI8x16ExtractLaneS, wasm.OpcodeVecI16x8ExtractLaneS: 3034 state.pc++ 
3035 if state.unreachable { 3036 break 3037 } 3038 var lane ssa.VecLane 3039 switch vecOp { 3040 case wasm.OpcodeVecI8x16ExtractLaneS: 3041 lane = ssa.VecLaneI8x16 3042 case wasm.OpcodeVecI16x8ExtractLaneS: 3043 lane = ssa.VecLaneI16x8 3044 } 3045 v1 := state.pop() 3046 index := c.wasmFunctionBody[state.pc] 3047 ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, true).Insert(builder).Return() 3048 state.push(ext) 3049 case wasm.OpcodeVecI8x16ExtractLaneU, wasm.OpcodeVecI16x8ExtractLaneU, 3050 wasm.OpcodeVecI32x4ExtractLane, wasm.OpcodeVecI64x2ExtractLane, 3051 wasm.OpcodeVecF32x4ExtractLane, wasm.OpcodeVecF64x2ExtractLane: 3052 state.pc++ // Skip the immediate value. 3053 if state.unreachable { 3054 break 3055 } 3056 var lane ssa.VecLane 3057 switch vecOp { 3058 case wasm.OpcodeVecI8x16ExtractLaneU: 3059 lane = ssa.VecLaneI8x16 3060 case wasm.OpcodeVecI16x8ExtractLaneU: 3061 lane = ssa.VecLaneI16x8 3062 case wasm.OpcodeVecI32x4ExtractLane: 3063 lane = ssa.VecLaneI32x4 3064 case wasm.OpcodeVecI64x2ExtractLane: 3065 lane = ssa.VecLaneI64x2 3066 case wasm.OpcodeVecF32x4ExtractLane: 3067 lane = ssa.VecLaneF32x4 3068 case wasm.OpcodeVecF64x2ExtractLane: 3069 lane = ssa.VecLaneF64x2 3070 } 3071 v1 := state.pop() 3072 index := c.wasmFunctionBody[state.pc] 3073 ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, false).Insert(builder).Return() 3074 state.push(ext) 3075 case wasm.OpcodeVecI8x16ReplaceLane, wasm.OpcodeVecI16x8ReplaceLane, 3076 wasm.OpcodeVecI32x4ReplaceLane, wasm.OpcodeVecI64x2ReplaceLane, 3077 wasm.OpcodeVecF32x4ReplaceLane, wasm.OpcodeVecF64x2ReplaceLane: 3078 state.pc++ 3079 if state.unreachable { 3080 break 3081 } 3082 var lane ssa.VecLane 3083 switch vecOp { 3084 case wasm.OpcodeVecI8x16ReplaceLane: 3085 lane = ssa.VecLaneI8x16 3086 case wasm.OpcodeVecI16x8ReplaceLane: 3087 lane = ssa.VecLaneI16x8 3088 case wasm.OpcodeVecI32x4ReplaceLane: 3089 lane = ssa.VecLaneI32x4 3090 case wasm.OpcodeVecI64x2ReplaceLane: 3091 lane = 
ssa.VecLaneI64x2 3092 case wasm.OpcodeVecF32x4ReplaceLane: 3093 lane = ssa.VecLaneF32x4 3094 case wasm.OpcodeVecF64x2ReplaceLane: 3095 lane = ssa.VecLaneF64x2 3096 } 3097 v2 := state.pop() 3098 v1 := state.pop() 3099 index := c.wasmFunctionBody[state.pc] 3100 ret := builder.AllocateInstruction().AsInsertlane(v1, v2, index, lane).Insert(builder).Return() 3101 state.push(ret) 3102 case wasm.OpcodeVecV128i8x16Shuffle: 3103 state.pc++ 3104 laneIndexes := c.wasmFunctionBody[state.pc : state.pc+16] 3105 state.pc += 15 3106 if state.unreachable { 3107 break 3108 } 3109 v2 := state.pop() 3110 v1 := state.pop() 3111 ret := builder.AllocateInstruction().AsShuffle(v1, v2, laneIndexes).Insert(builder).Return() 3112 state.push(ret) 3113 3114 case wasm.OpcodeVecI8x16Swizzle: 3115 if state.unreachable { 3116 break 3117 } 3118 v2 := state.pop() 3119 v1 := state.pop() 3120 ret := builder.AllocateInstruction().AsSwizzle(v1, v2, ssa.VecLaneI8x16).Insert(builder).Return() 3121 state.push(ret) 3122 3123 case wasm.OpcodeVecI8x16Splat, 3124 wasm.OpcodeVecI16x8Splat, 3125 wasm.OpcodeVecI32x4Splat, 3126 wasm.OpcodeVecI64x2Splat, 3127 wasm.OpcodeVecF32x4Splat, 3128 wasm.OpcodeVecF64x2Splat: 3129 if state.unreachable { 3130 break 3131 } 3132 var lane ssa.VecLane 3133 switch vecOp { 3134 case wasm.OpcodeVecI8x16Splat: 3135 lane = ssa.VecLaneI8x16 3136 case wasm.OpcodeVecI16x8Splat: 3137 lane = ssa.VecLaneI16x8 3138 case wasm.OpcodeVecI32x4Splat: 3139 lane = ssa.VecLaneI32x4 3140 case wasm.OpcodeVecI64x2Splat: 3141 lane = ssa.VecLaneI64x2 3142 case wasm.OpcodeVecF32x4Splat: 3143 lane = ssa.VecLaneF32x4 3144 case wasm.OpcodeVecF64x2Splat: 3145 lane = ssa.VecLaneF64x2 3146 } 3147 v1 := state.pop() 3148 ret := builder.AllocateInstruction().AsSplat(v1, lane).Insert(builder).Return() 3149 state.push(ret) 3150 3151 default: 3152 panic("TODO: unsupported vector instruction: " + wasm.VectorInstructionName(vecOp)) 3153 } 3154 case wasm.OpcodeRefFunc: 3155 funcIndex := c.readI32u() 3156 if 
state.unreachable { 3157 break 3158 } 3159 3160 c.storeCallerModuleContext() 3161 3162 funcIndexVal := builder.AllocateInstruction().AsIconst32(funcIndex).Insert(builder).Return() 3163 3164 refFuncPtr := builder.AllocateInstruction(). 3165 AsLoad(c.execCtxPtrValue, 3166 wazevoapi.ExecutionContextOffsetRefFuncTrampolineAddress.U32(), 3167 ssa.TypeI64, 3168 ).Insert(builder).Return() 3169 3170 // TODO: reuse the slice. 3171 args := []ssa.Value{c.execCtxPtrValue, funcIndexVal} 3172 refFuncRet := builder. 3173 AllocateInstruction(). 3174 AsCallIndirect(refFuncPtr, &c.refFuncSig, args). 3175 Insert(builder).Return() 3176 state.push(refFuncRet) 3177 3178 case wasm.OpcodeRefNull: 3179 c.loweringState.pc++ // skips the reference type as we treat both of them as i64(0). 3180 if state.unreachable { 3181 break 3182 } 3183 ret := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 3184 state.push(ret) 3185 case wasm.OpcodeRefIsNull: 3186 if state.unreachable { 3187 break 3188 } 3189 r := state.pop() 3190 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder) 3191 icmp := builder.AllocateInstruction(). 3192 AsIcmp(r, zero.Return(), ssa.IntegerCmpCondEqual). 3193 Insert(builder). 
			Return()
		state.push(icmp)
	case wasm.OpcodeTableSet:
		// table.set: pops the reference value and the table offset, then stores
		// the reference at the bounds-checked element address.
		tableIndex := c.readI32u()
		if state.unreachable {
			break
		}
		r := state.pop()
		targetOffsetInTable := state.pop()

		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
		builder.AllocateInstruction().AsStore(ssa.OpcodeStore, r, elementAddr, 0).Insert(builder)

	case wasm.OpcodeTableGet:
		// table.get: pops the table offset and pushes the 64-bit reference loaded
		// from the bounds-checked element address.
		tableIndex := c.readI32u()
		if state.unreachable {
			break
		}
		targetOffsetInTable := state.pop()
		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
		loaded := builder.AllocateInstruction().AsLoad(elementAddr, 0, ssa.TypeI64).Insert(builder).Return()
		state.push(loaded)
	default:
		panic("TODO: unsupported in wazevo yet: " + wasm.InstructionName(op))
	}

	if wazevoapi.FrontEndLoggingEnabled {
		fmt.Println("--------- Translated " + wasm.InstructionName(op) + " --------")
		fmt.Println("state: " + c.loweringState.String())
		fmt.Println(c.formatBuilder())
		fmt.Println("--------------------------")
	}
	// Advance past the opcode byte itself; immediates were consumed by the readers above.
	c.loweringState.pc++
}

// lowerExtMul lowers the vector extended-multiply family: both operands are
// widened from the `from` lane shape (signed/unsigned and low/high half chosen
// by the flags), then multiplied in the `to` lane shape.
func (c *Compiler) lowerExtMul(v1, v2 ssa.Value, from, to ssa.VecLane, signed, low bool) ssa.Value {
	// TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs.
	builder := c.ssaBuilder

	v1lo := builder.AllocateInstruction().AsWiden(v1, from, signed, low).Insert(builder).Return()
	v2lo := builder.AllocateInstruction().AsWiden(v2, from, signed, low).Insert(builder).Return()

	return builder.AllocateInstruction().AsVImul(v1lo, v2lo, to).Insert(builder).Return()
}

// Byte offsets of the fields inside a table instance as laid out in memory:
// the base address of the elements slice, followed by its length.
const (
	tableInstanceBaseAddressOffset = 0
	tableInstanceLenOffset         = tableInstanceBaseAddressOffset + 8
)

// lowerAccessTableWithBoundsCheck emits a bounds check against the table's
// length (trapping with ExitCodeTableOutOfBounds when
// elementOffsetInTable >= len) and returns the address of the element slot,
// i.e. base + elementOffsetInTable*8.
func (c *Compiler) lowerAccessTableWithBoundsCheck(tableIndex uint32, elementOffsetInTable ssa.Value) (elementAddress ssa.Value) {
	builder := c.ssaBuilder

	// Load the table.
	loadTableInstancePtr := builder.AllocateInstruction()
	loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64)
	builder.InsertInstruction(loadTableInstancePtr)
	tableInstancePtr := loadTableInstancePtr.Return()

	// Load the table's length.
	loadTableLen := builder.AllocateInstruction()
	loadTableLen.AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32)
	builder.InsertInstruction(loadTableLen)
	tableLen := loadTableLen.Return()

	// Compare the length and the target, and trap if out of bounds
	// (unsigned compare, so a negative i32 offset is also rejected).
	checkOOB := builder.AllocateInstruction()
	checkOOB.AsIcmp(elementOffsetInTable, tableLen, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual)
	builder.InsertInstruction(checkOOB)
	exitIfOOB := builder.AllocateInstruction()
	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
	builder.InsertInstruction(exitIfOOB)

	// Get the base address of wasm.TableInstance.References.
	loadTableBaseAddress := builder.AllocateInstruction()
	loadTableBaseAddress.AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64)
	builder.InsertInstruction(loadTableBaseAddress)
	tableBase := loadTableBaseAddress.Return()

	// Calculate the address of the target function. First we need to multiply
	// targetOffsetInTable by 8 (pointer size), done as a shift-left by 3.
	multiplyBy8 := builder.AllocateInstruction()
	three := builder.AllocateInstruction()
	three.AsIconst64(3)
	builder.InsertInstruction(three)
	multiplyBy8.AsIshl(elementOffsetInTable, three.Return())
	builder.InsertInstruction(multiplyBy8)
	targetOffsetInTableMultipliedBy8 := multiplyBy8.Return()

	// Then add the multiplied value to the base which results in the address of the target function (*wazevo.functionInstance)
	calcElementAddressInTable := builder.AllocateInstruction()
	calcElementAddressInTable.AsIadd(tableBase, targetOffsetInTableMultipliedBy8)
	builder.InsertInstruction(calcElementAddressInTable)
	return calcElementAddressInTable.Return()
}

// lowerCallIndirect lowers call_indirect: it looks up the function instance in
// the table (with bounds check), null-checks it, type-checks it against the
// expected type ID, and finally emits the indirect call.
func (c *Compiler) lowerCallIndirect(typeIndex, tableIndex uint32) {
	builder := c.ssaBuilder
	state := c.state()

	elementOffsetInTable := state.pop()
	functionInstancePtrAddress := c.lowerAccessTableWithBoundsCheck(tableIndex, elementOffsetInTable)
	loadFunctionInstancePtr := builder.AllocateInstruction()
	loadFunctionInstancePtr.AsLoad(functionInstancePtrAddress, 0, ssa.TypeI64)
	builder.InsertInstruction(loadFunctionInstancePtr)
	functionInstancePtr := loadFunctionInstancePtr.Return()

	// Check if it is not the null pointer.
	zero := builder.AllocateInstruction()
	zero.AsIconst64(0)
	builder.InsertInstruction(zero)
	checkNull := builder.AllocateInstruction()
	checkNull.AsIcmp(functionInstancePtr, zero.Return(), ssa.IntegerCmpCondEqual)
	builder.InsertInstruction(checkNull)
	exitIfNull := builder.AllocateInstruction()
	exitIfNull.AsExitIfTrueWithCode(c.execCtxPtrValue, checkNull.Return(), wazevoapi.ExitCodeIndirectCallNullPointer)
	builder.InsertInstruction(exitIfNull)

	// We need to do the type check. First, load the target function instance's typeID.
	loadTypeID := builder.AllocateInstruction()
	loadTypeID.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceTypeIDOffset, ssa.TypeI32)
	builder.InsertInstruction(loadTypeID)
	actualTypeID := loadTypeID.Return()

	// Next, we load the expected TypeID from the module's typeID array at typeIndex:
	loadTypeIDsBegin := builder.AllocateInstruction()
	loadTypeIDsBegin.AsLoad(c.moduleCtxPtrValue, c.offset.TypeIDs1stElement.U32(), ssa.TypeI64)
	builder.InsertInstruction(loadTypeIDsBegin)
	typeIDsBegin := loadTypeIDsBegin.Return()

	loadExpectedTypeID := builder.AllocateInstruction()
	loadExpectedTypeID.AsLoad(typeIDsBegin, uint32(typeIndex)*4 /* size of wasm.FunctionTypeID */, ssa.TypeI32)
	builder.InsertInstruction(loadExpectedTypeID)
	expectedTypeID := loadExpectedTypeID.Return()

	// Check if the type ID matches; trap with a type-mismatch exit code if not.
	checkTypeID := builder.AllocateInstruction()
	checkTypeID.AsIcmp(actualTypeID, expectedTypeID, ssa.IntegerCmpCondNotEqual)
	builder.InsertInstruction(checkTypeID)
	exitIfNotMatch := builder.AllocateInstruction()
	exitIfNotMatch.AsExitIfTrueWithCode(c.execCtxPtrValue, checkTypeID.Return(), wazevoapi.ExitCodeIndirectCallTypeMismatch)
	builder.InsertInstruction(exitIfNotMatch)

	// Now ready to call the function. Load the executable and moduleContextOpaquePtr from the function instance.
	loadExecutablePtr := builder.AllocateInstruction()
	loadExecutablePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceExecutableOffset, ssa.TypeI64)
	builder.InsertInstruction(loadExecutablePtr)
	executablePtr := loadExecutablePtr.Return()
	loadModuleContextOpaquePtr := builder.AllocateInstruction()
	loadModuleContextOpaquePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceModuleContextOpaquePtrOffset, ssa.TypeI64)
	builder.InsertInstruction(loadModuleContextOpaquePtr)
	moduleContextOpaquePtr := loadModuleContextOpaquePtr.Return()

	// Build the argument list: [execCtx, calleeModuleCtx, wasm args...].
	// TODO: reuse slice?
	typ := &c.m.TypeSection[typeIndex]
	argN := len(typ.Params)
	args := make([]ssa.Value, argN+2)
	args[0] = c.execCtxPtrValue
	args[1] = moduleContextOpaquePtr
	state.nPopInto(argN, args[2:])

	// Before transfer the control to the callee, we have to store the current module's moduleContextPtr
	// into execContext.callerModuleContextPtr in case when the callee is a Go function.
	c.storeCallerModuleContext()

	call := builder.AllocateInstruction()
	call.AsCallIndirect(executablePtr, c.signatures[typ], args)
	builder.InsertInstruction(call)

	// Push the call results back onto the Wasm value stack.
	first, rest := call.Returns()
	if first.Valid() {
		state.push(first)
	}
	for _, v := range rest {
		state.push(v)
	}

	c.reloadAfterCall()
}

// memOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores).
func (c *Compiler) memOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) {
	address = ssa.ValueInvalid
	builder := c.ssaBuilder

	baseAddrID := baseAddr.ID()
	// ceil is the highest byte index (exclusive) this access may touch.
	ceil := constOffset + operationSizeInBytes
	if known := c.getKnownSafeBound(baseAddrID); known.valid() {
		// We reuse the calculated absolute address even if the bound is not known to be safe.
		address = known.absoluteAddr
		if ceil <= known.bound {
			// A previous access in this block already proved this range is in
			// bounds, so both the check and the address computation are skipped.
			return
		}
	}

	ceilConst := builder.AllocateInstruction()
	ceilConst.AsIconst64(ceil)
	builder.InsertInstruction(ceilConst)

	// We calculate the offset in 64-bit space.
	extBaseAddr := builder.AllocateInstruction().
		AsUExtend(baseAddr, 32, 64).
		Insert(builder).
		Return()

	// Note: memLen is already zero extended to 64-bit space at the load time.
	memLen := c.getMemoryLenValue(false)

	// baseAddrPlusCeil = baseAddr + ceil
	baseAddrPlusCeil := builder.AllocateInstruction()
	baseAddrPlusCeil.AsIadd(extBaseAddr, ceilConst.Return())
	builder.InsertInstruction(baseAddrPlusCeil)

	// Out-of-bounds check: trap when `memLen < baseAddrPlusCeil`, i.e. the
	// access is valid only while `memLen >= baseAddrPlusCeil`.
	cmp := builder.AllocateInstruction()
	cmp.AsIcmp(memLen, baseAddrPlusCeil.Return(), ssa.IntegerCmpCondUnsignedLessThan)
	builder.InsertInstruction(cmp)
	exitIfNZ := builder.AllocateInstruction()
	exitIfNZ.AsExitIfTrueWithCode(c.execCtxPtrValue, cmp.Return(), wazevoapi.ExitCodeMemoryOutOfBounds)
	builder.InsertInstruction(exitIfNZ)

	// Compute memBase + extBaseAddr.
	if address == ssa.ValueInvalid { // Reuse the value if the memBase is already calculated at this point.
		memBase := c.getMemoryBaseValue(false)
		address = builder.AllocateInstruction().
			AsIadd(memBase, extBaseAddr).Insert(builder).Return()
	}

	// Record the bound ceil for this baseAddr is known to be safe for the subsequent memory access in the same block.
	c.recordKnownSafeBound(baseAddrID, ceil, address)
	return
}

// callMemmove emits an indirect call to the runtime's memmove trampoline with
// (dst, src, size) arguments; used for bulk-memory style copies.
func (c *Compiler) callMemmove(dst, src, size ssa.Value) {
	args := []ssa.Value{dst, src, size} // TODO: reuse the slice.

	builder := c.ssaBuilder
	memmovePtr := builder.AllocateInstruction().
		AsLoad(c.execCtxPtrValue,
			wazevoapi.ExecutionContextOffsetMemmoveAddress.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()
	builder.AllocateInstruction().AsCallIndirect(memmovePtr, &c.memmoveSig, args).Insert(builder)
}

// reloadAfterCall re-defines the cached memory base/length and mutable globals
// after any call, since the callee may have changed them (e.g. memory.grow or
// a host function mutating globals).
func (c *Compiler) reloadAfterCall() {
	// Note that when these are not used in the following instructions, they will be optimized out.
	// So in any ways, we define them!

	// After calling any function, memory buffer might have changed. So we need to re-defined the variable.
	if c.needMemory {
		c.reloadMemoryBaseLen()
	}

	// Also, any mutable Global can change.
	for _, index := range c.mutableGlobalVariablesIndexes {
		_ = c.getWasmGlobalValue(index, true)
	}
}

// reloadMemoryBaseLen force-reloads the memory base and length variables and
// invalidates the cached bounds-check results.
func (c *Compiler) reloadMemoryBaseLen() {
	_ = c.getMemoryBaseValue(true)
	_ = c.getMemoryLenValue(true)

	// This function being called means that the memory base might have changed.
	// Therefore, we need to clear the known safe bounds because we cache the absolute address of the memory access per each base offset.
	c.clearSafeBounds()
}

// setWasmGlobalValue stores v into the global at the given index. Imported
// globals are reached through one extra pointer indirection; local globals
// live directly inside the opaque module context.
func (c *Compiler) setWasmGlobalValue(index wasm.Index, v ssa.Value) {
	variable := c.globalVariables[index]
	opaqueOffset := c.offset.GlobalInstanceOffset(index)

	builder := c.ssaBuilder
	if index < c.m.ImportGlobalCount {
		// Imported global: load the *global instance pointer first, then store through it.
		loadGlobalInstPtr := builder.AllocateInstruction()
		loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64)
		builder.InsertInstruction(loadGlobalInstPtr)

		store := builder.AllocateInstruction()
		store.AsStore(ssa.OpcodeStore, v, loadGlobalInstPtr.Return(), uint32(0))
		builder.InsertInstruction(store)

	} else {
		// Local global: stored inline in the module context at opaqueOffset.
		store := builder.AllocateInstruction()
		store.AsStore(ssa.OpcodeStore, v, c.moduleCtxPtrValue, uint32(opaqueOffset))
		builder.InsertInstruction(store)
	}

	// The value has changed to `v`, so we record it.
	builder.DefineVariableInCurrentBB(variable, v)
}

// getWasmGlobalValue returns the SSA value of the global at index, reusing a
// previously-defined value in the current linear path unless forceLoad is set
// (which is needed right after calls, when the global may have changed).
func (c *Compiler) getWasmGlobalValue(index wasm.Index, forceLoad bool) ssa.Value {
	variable := c.globalVariables[index]
	typ := c.globalVariablesTypes[index]
	opaqueOffset := c.offset.GlobalInstanceOffset(index)

	builder := c.ssaBuilder
	if !forceLoad {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var load *ssa.Instruction
	if index < c.m.ImportGlobalCount {
		// Imported global: double indirection, mirroring setWasmGlobalValue.
		loadGlobalInstPtr := builder.AllocateInstruction()
		loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64)
		builder.InsertInstruction(loadGlobalInstPtr)
		load = builder.AllocateInstruction().
			AsLoad(loadGlobalInstPtr.Return(), uint32(0), typ)
	} else {
		load = builder.AllocateInstruction().
			AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), typ)
	}

	v := load.Insert(builder).Return()
	builder.DefineVariableInCurrentBB(variable, v)
	return v
}

// Byte offsets of the fields inside a memory instance: the buffer base
// address, followed by the buffer size.
const (
	memoryInstanceBufOffset     = 0
	memoryInstanceBufSizeOffset = memoryInstanceBufOffset + 8
)

// getMemoryBaseValue returns the SSA value holding the memory buffer's base
// address, reusing the cached definition unless forceReload is set.
func (c *Compiler) getMemoryBaseValue(forceReload bool) ssa.Value {
	builder := c.ssaBuilder
	variable := c.memoryBaseVariable
	if !forceReload {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var ret ssa.Value
	// A negative LocalMemoryBegin indicates the memory is imported and must be
	// reached via the imported memory instance pointer.
	if c.offset.LocalMemoryBegin < 0 {
		loadMemInstPtr := builder.AllocateInstruction()
		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
		builder.InsertInstruction(loadMemInstPtr)
		memInstPtr := loadMemInstPtr.Return()

		loadBufPtr := builder.AllocateInstruction()
		loadBufPtr.AsLoad(memInstPtr, memoryInstanceBufOffset, ssa.TypeI64)
		builder.InsertInstruction(loadBufPtr)
		ret = loadBufPtr.Return()
	} else {
		load := builder.AllocateInstruction()
		load.AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryBase().U32(), ssa.TypeI64)
		builder.InsertInstruction(load)
		ret = load.Return()
	}

	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}

// getMemoryLenValue returns the SSA value holding the memory buffer's length
// (zero-extended to 64-bit), reusing the cached definition unless forceReload is set.
func (c *Compiler) getMemoryLenValue(forceReload bool) ssa.Value {
	variable := c.memoryLenVariable
	builder := c.ssaBuilder
	if !forceReload {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var ret ssa.Value
	if c.offset.LocalMemoryBegin < 0 {
		// Imported memory: read the length out of the memory instance.
		loadMemInstPtr := builder.AllocateInstruction()
		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
		builder.InsertInstruction(loadMemInstPtr)
		memInstPtr := loadMemInstPtr.Return()

		loadBufSizePtr := builder.AllocateInstruction()
		loadBufSizePtr.AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI64)
		builder.InsertInstruction(loadBufSizePtr)

		ret = loadBufSizePtr.Return()
	} else {
		// Local memory: length is stored as 32-bit in the module context and
		// zero-extended to 64-bit at load time.
		load := builder.AllocateInstruction()
		load.AsExtLoad(ssa.OpcodeUload32, c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), true)
		builder.InsertInstruction(load)
		ret = load.Return()
	}

	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}

// insertIcmp pops y then x and pushes the integer comparison `x cond y`.
func (c *Compiler) insertIcmp(cond ssa.IntegerCmpCond) {
	state, builder := c.state(), c.ssaBuilder
	y, x := state.pop(), state.pop()
	cmp := builder.AllocateInstruction()
	cmp.AsIcmp(x, y, cond)
	builder.InsertInstruction(cmp)
	value := cmp.Return()
	state.push(value)
}

// insertFcmp pops y then x and pushes the float comparison `x cond y`.
func (c *Compiler) insertFcmp(cond ssa.FloatCmpCond) {
	state, builder := c.state(), c.ssaBuilder
	y, x := state.pop(), state.pop()
	cmp := builder.AllocateInstruction()
	cmp.AsFcmp(x, y, cond)
	builder.InsertInstruction(cmp)
	value := cmp.Return()
	state.push(value)
}

// storeCallerModuleContext stores the current module's moduleContextPtr into execContext.callerModuleContextPtr.
func (c *Compiler) storeCallerModuleContext() {
	builder := c.ssaBuilder
	execCtx := c.execCtxPtrValue
	store := builder.AllocateInstruction()
	store.AsStore(ssa.OpcodeStore,
		c.moduleCtxPtrValue, execCtx, wazevoapi.ExecutionContextOffsetCallerModuleContextPtr.U32())
	builder.InsertInstruction(store)
}

// readI32u reads an unsigned LEB128 u32 immediate starting just past the
// current pc, and advances pc by the number of bytes consumed.
func (c *Compiler) readI32u() uint32 {
	v, n, err := leb128.LoadUint32(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readI32s reads a signed LEB128 i32 immediate and advances pc.
func (c *Compiler) readI32s() int32 {
	v, n, err := leb128.LoadInt32(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readI64s reads a signed LEB128 i64 immediate and advances pc.
func (c *Compiler) readI64s() int64 {
	v, n, err := leb128.LoadInt64(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readF32 reads a little-endian 4-byte f32 immediate and advances pc.
func (c *Compiler) readF32() float32 {
	v := math.Float32frombits(binary.LittleEndian.Uint32(c.wasmFunctionBody[c.loweringState.pc+1:]))
	c.loweringState.pc += 4
	return v
}

// readF64 reads a little-endian 8-byte f64 immediate and advances pc.
func (c *Compiler) readF64() float64 {
	v := math.Float64frombits(binary.LittleEndian.Uint64(c.wasmFunctionBody[c.loweringState.pc+1:]))
	c.loweringState.pc += 8
	return v
}

// readBlockType reads the block type from the current position of the bytecode reader.
func (c *Compiler) readBlockType() *wasm.FunctionType {
	state := c.state()

	c.br.Reset(c.wasmFunctionBody[state.pc+1:])
	bt, num, err := wasm.DecodeBlockType(c.m.TypeSection, c.br, api.CoreFeaturesV2)
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
3660 } 3661 state.pc += int(num) 3662 3663 return bt 3664 } 3665 3666 func (c *Compiler) readMemArg() (align, offset uint32) { 3667 state := c.state() 3668 3669 align, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:]) 3670 if err != nil { 3671 panic(fmt.Errorf("read memory align: %v", err)) 3672 } 3673 3674 state.pc += int(num) 3675 offset, num, err = leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:]) 3676 if err != nil { 3677 panic(fmt.Errorf("read memory offset: %v", err)) 3678 } 3679 3680 state.pc += int(num) 3681 return align, offset 3682 } 3683 3684 // insertJumpToBlock inserts a jump instruction to the given block in the current block. 3685 func (c *Compiler) insertJumpToBlock(args []ssa.Value, targetBlk ssa.BasicBlock) { 3686 if targetBlk.ReturnBlock() { 3687 if c.needListener { 3688 c.callListenerAfter() 3689 } 3690 } 3691 3692 builder := c.ssaBuilder 3693 jmp := builder.AllocateInstruction() 3694 jmp.AsJump(args, targetBlk) 3695 builder.InsertInstruction(jmp) 3696 } 3697 3698 func (c *Compiler) insertIntegerExtend(signed bool, from, to byte) { 3699 state := c.state() 3700 builder := c.ssaBuilder 3701 v := state.pop() 3702 extend := builder.AllocateInstruction() 3703 if signed { 3704 extend.AsSExtend(v, from, to) 3705 } else { 3706 extend.AsUExtend(v, from, to) 3707 } 3708 builder.InsertInstruction(extend) 3709 value := extend.Return() 3710 state.push(value) 3711 } 3712 3713 func (c *Compiler) switchTo(originalStackLen int, targetBlk ssa.BasicBlock) { 3714 if targetBlk.Preds() == 0 { 3715 c.loweringState.unreachable = true 3716 } 3717 3718 // Now we should adjust the stack and start translating the continuation block. 3719 c.loweringState.values = c.loweringState.values[:originalStackLen] 3720 3721 c.ssaBuilder.SetCurrentBlock(targetBlk) 3722 3723 // At this point, blocks params consist only of the Wasm-level parameters, 3724 // (since it's added only when we are trying to resolve variable *inside* this block). 
3725 for i := 0; i < targetBlk.Params(); i++ { 3726 value := targetBlk.Param(i) 3727 c.loweringState.push(value) 3728 } 3729 } 3730 3731 // cloneValuesList clones the given values list. 3732 func cloneValuesList(in []ssa.Value) (ret []ssa.Value) { 3733 ret = make([]ssa.Value, len(in)) 3734 for i := range ret { 3735 ret[i] = in[i] 3736 } 3737 return 3738 } 3739 3740 // results returns the number of results of the current function. 3741 func (c *Compiler) results() int { 3742 return len(c.wasmFunctionTyp.Results) 3743 } 3744 3745 func (c *Compiler) lowerBrTable(labels []uint32, index ssa.Value) { 3746 state := c.state() 3747 builder := c.ssaBuilder 3748 3749 f := state.ctrlPeekAt(int(labels[0])) 3750 var numArgs int 3751 if f.isLoop() { 3752 numArgs = len(f.blockType.Params) 3753 } else { 3754 numArgs = len(f.blockType.Results) 3755 } 3756 3757 targets := make([]ssa.BasicBlock, len(labels)) 3758 3759 // We need trampoline blocks since depending on the target block structure, we might end up inserting moves before jumps, 3760 // which cannot be done with br_table. Instead, we can do such per-block moves in the trampoline blocks. 3761 // At the linking phase (very end of the backend), we can remove the unnecessary jumps, and therefore no runtime overhead. 3762 currentBlk := builder.CurrentBlock() 3763 for i, l := range labels { 3764 // Args are always on the top of the stack. Note that we should not share the args slice 3765 // among the jump instructions since the args are modified during passes (e.g. redundant phi elimination). 3766 args := c.loweringState.nPeekDup(numArgs) 3767 targetBlk, _ := state.brTargetArgNumFor(l) 3768 trampoline := builder.AllocateBasicBlock() 3769 builder.SetCurrentBlock(trampoline) 3770 c.insertJumpToBlock(args, targetBlk) 3771 targets[i] = trampoline 3772 } 3773 builder.SetCurrentBlock(currentBlk) 3774 3775 // If the target block has no arguments, we can just jump to the target block. 
	brTable := builder.AllocateInstruction()
	brTable.AsBrTable(index, targets)
	builder.InsertInstruction(brTable)

	// Each trampoline has exactly one predecessor (the br_table), so it can be sealed immediately.
	for _, trampoline := range targets {
		builder.Seal(trampoline)
	}
}

// brTargetArgNumFor resolves the branch destination and its argument count for
// the control frame at labelIndex: loops branch back to their header and take
// params; all other frames branch to their following block and take results.
func (l *loweringState) brTargetArgNumFor(labelIndex uint32) (targetBlk ssa.BasicBlock, argNum int) {
	targetFrame := l.ctrlPeekAt(int(labelIndex))
	if targetFrame.isLoop() {
		targetBlk, argNum = targetFrame.blk, len(targetFrame.blockType.Params)
	} else {
		targetBlk, argNum = targetFrame.followingBlock, len(targetFrame.blockType.Results)
	}
	return
}

// callListenerBefore emits a call to the "before" listener trampoline for this
// function, passing (execCtx, funcIndex, wasm params...).
func (c *Compiler) callListenerBefore() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	beforeListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.BeforeListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	beforeListenerPtr := builder.AllocateInstruction().
		AsLoad(beforeListeners1stElement, uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).Insert(builder).Return()

	// Forward the entry block's params (skipping execCtx/moduleCtx at 0 and 1)
	// as the Wasm-level arguments.
	entry := builder.EntryBlock()
	ps := entry.Params()
	// TODO: reuse!
	args := make([]ssa.Value, ps)
	args[0] = c.execCtxPtrValue
	args[1] = builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return()
	for i := 2; i < ps; i++ {
		args[i] = entry.Param(i)
	}

	beforeSig := c.listenerSignatures[c.wasmFunctionTyp][0]
	builder.AllocateInstruction().
		AsCallIndirect(beforeListenerPtr, beforeSig, args).
		Insert(builder)
}

// callListenerAfter emits a call to the "after" listener trampoline for this
// function, passing (execCtx, funcIndex, wasm results...).
func (c *Compiler) callListenerAfter() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	afterListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.AfterListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	afterListenerPtr := builder.AllocateInstruction().
		AsLoad(afterListeners1stElement,
			uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).
		Insert(builder).
		Return()

	afterSig := c.listenerSignatures[c.wasmFunctionTyp][1]
	// Peek (not pop) the results: they must stay on the stack for the actual return.
	results := c.loweringState.nPeekDup(c.results())

	// TODO: reuse!
	args := make([]ssa.Value, len(results)+2)
	args[0] = c.execCtxPtrValue
	args[1] = builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return()
	for i, r := range results {
		args[i+2] = r
	}
	builder.AllocateInstruction().
		AsCallIndirect(afterListenerPtr, afterSig, args).
		Insert(builder)
}

// Layout constants for element/data instances: each instance is a 24-byte
// slice header, with its length at offset 8.
const (
	elementOrDataInstanceLenOffset = 8
	elementOrDataInstanceSize      = 24
)

// dropDataOrElementInstance inserts instructions to drop the element/data instance specified by the given index.
func (c *Compiler) dropDataOrElementInstance(index uint32, firstItemOffset wazevoapi.Offset) {
	builder := c.ssaBuilder
	instPtr := c.dataOrElementInstanceAddr(index, firstItemOffset)

	zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()

	// Clear the instance: zero all three 8-byte words of the slice header.
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, 0).Insert(builder)
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset).Insert(builder)
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset+8).Insert(builder)
}

// dataOrElementInstanceAddr returns the address of the index-th data/element
// instance relative to the first item at firstItemOffset.
func (c *Compiler) dataOrElementInstanceAddr(index uint32, firstItemOffset wazevoapi.Offset) ssa.Value {
	builder := c.ssaBuilder

	_1stItemPtr := builder.
		AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue, firstItemOffset.U32(), ssa.TypeI64).
		Insert(builder).Return()

	// Each data/element instance is a slice, so we need to multiply index by
	// elementOrDataInstanceSize (24: the slice header) to get the offset of the target instance.
	index = index * elementOrDataInstanceSize
	indexExt := builder.AllocateInstruction().AsIconst64(uint64(index)).Insert(builder).Return()
	// Then, add the offset to the address of the instance.
	instPtr := builder.AllocateInstruction().AsIadd(_1stItemPtr, indexExt).Insert(builder).Return()
	return instPtr
}

// boundsCheckInDataOrElementInstance traps with exitCode when
// offsetInInstance+copySize exceeds the instance's length.
func (c *Compiler) boundsCheckInDataOrElementInstance(instPtr, offsetInInstance, copySize ssa.Value, exitCode wazevoapi.ExitCode) {
	builder := c.ssaBuilder
	dataInstLen := builder.AllocateInstruction().
		AsLoad(instPtr, elementOrDataInstanceLenOffset, ssa.TypeI64).
		Insert(builder).Return()
	ceil := builder.AllocateInstruction().AsIadd(offsetInInstance, copySize).Insert(builder).Return()
	// Trap when len < offset+size (unsigned compare).
	cmp := builder.AllocateInstruction().
		AsIcmp(dataInstLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
		Insert(builder).
		Return()
	builder.AllocateInstruction().
		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, exitCode).
		Insert(builder)
}

// boundsCheckInTable traps with ExitCodeTableOutOfBounds when offset+size
// exceeds the table's length, and returns the loaded table instance pointer
// for reuse by the caller.
func (c *Compiler) boundsCheckInTable(tableIndex uint32, offset, size ssa.Value) (tableInstancePtr ssa.Value) {
	builder := c.ssaBuilder
	dstCeil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()

	// Load the table.
	tableInstancePtr = builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64).
		Insert(builder).Return()

	// Load the table's length (32-bit, zero-extended for the 64-bit compare).
	tableLen := builder.AllocateInstruction().
		AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32).Insert(builder).Return()
	tableLenExt := builder.AllocateInstruction().AsUExtend(tableLen, 32, 64).Insert(builder).Return()

	// Compare the length and the target, and trap if out of bounds.
	checkOOB := builder.AllocateInstruction()
	checkOOB.AsIcmp(tableLenExt, dstCeil, ssa.IntegerCmpCondUnsignedLessThan)
	builder.InsertInstruction(checkOOB)
	exitIfOOB := builder.AllocateInstruction()
	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
	builder.InsertInstruction(exitIfOOB)
	return
}

// loadTableBaseAddr loads the base address of the table's elements slice from
// the given table instance pointer.
func (c *Compiler) loadTableBaseAddr(tableInstancePtr ssa.Value) ssa.Value {
	builder := c.ssaBuilder
	loadTableBaseAddress := builder.
		AllocateInstruction().
		AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64).
		Insert(builder)
	return loadTableBaseAddress.Return()
}

// boundsCheckInMemory traps with ExitCodeMemoryOutOfBounds when offset+size
// exceeds memLen (all values in 64-bit space).
func (c *Compiler) boundsCheckInMemory(memLen, offset, size ssa.Value) {
	builder := c.ssaBuilder
	ceil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()
	cmp := builder.AllocateInstruction().
		AsIcmp(memLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
		Insert(builder).
		Return()
	builder.AllocateInstruction().
		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeMemoryOutOfBounds).
		Insert(builder)
}