package frontend

import (
	"encoding/binary"
	"fmt"
	"math"
	"strings"

	"github.com/wasilibs/wazerox/api"
	"github.com/wasilibs/wazerox/internal/engine/wazevo/ssa"
	"github.com/wasilibs/wazerox/internal/engine/wazevo/wazevoapi"
	"github.com/wasilibs/wazerox/internal/leb128"
	"github.com/wasilibs/wazerox/internal/wasm"
)

type (
	// loweringState is used to keep the state of lowering.
	loweringState struct {
		// values holds the values on the Wasm stack.
		values []ssa.Value
		// controlFrames is the stack of currently-open control frames
		// (function body, block, loop, if/else).
		controlFrames []controlFrame
		// unreachable is true while lowering code that follows an
		// unconditional transfer of control (i.e. dead code).
		unreachable bool
		// unreachableDepth counts control frames entered while unreachable,
		// so we know when reachability resumes at the matching "end".
		unreachableDepth int
		// tmpForBrTable is a scratch buffer reused across br_table lowerings.
		tmpForBrTable []uint32
		// pc is the byte offset of the current opcode in the function body.
		pc int
	}
	controlFrame struct {
		kind controlFrameKind
		// originalStackLen holds the number of values on the Wasm stack
		// when start executing this control frame minus params for the block.
		originalStackLenWithoutParam int
		// blk is the loop header if this is loop, and is the else-block if this is an if frame.
		blk,
		// followingBlock is the basic block we enter if we reach "end" of block.
		followingBlock ssa.BasicBlock
		blockType *wasm.FunctionType
		// clonedArgs hold the arguments to Else block.
		clonedArgs []ssa.Value
	}

	controlFrameKind byte
)

// String implements fmt.Stringer for debugging.
45 func (l *loweringState) String() string { 46 var str []string 47 for _, v := range l.values { 48 str = append(str, fmt.Sprintf("v%v", v.ID())) 49 } 50 var frames []string 51 for i := range l.controlFrames { 52 frames = append(frames, l.controlFrames[i].kind.String()) 53 } 54 return fmt.Sprintf("\n\tunreachable=%v(depth=%d)\n\tstack: %s\n\tcontrol frames: %s", 55 l.unreachable, l.unreachableDepth, 56 strings.Join(str, ", "), 57 strings.Join(frames, ", "), 58 ) 59 } 60 61 const ( 62 controlFrameKindFunction = iota + 1 63 controlFrameKindLoop 64 controlFrameKindIfWithElse 65 controlFrameKindIfWithoutElse 66 controlFrameKindBlock 67 ) 68 69 // String implements fmt.Stringer for debugging. 70 func (k controlFrameKind) String() string { 71 switch k { 72 case controlFrameKindFunction: 73 return "function" 74 case controlFrameKindLoop: 75 return "loop" 76 case controlFrameKindIfWithElse: 77 return "if_with_else" 78 case controlFrameKindIfWithoutElse: 79 return "if_without_else" 80 case controlFrameKindBlock: 81 return "block" 82 default: 83 panic(k) 84 } 85 } 86 87 // isLoop returns true if this is a loop frame. 88 func (ctrl *controlFrame) isLoop() bool { 89 return ctrl.kind == controlFrameKindLoop 90 } 91 92 // reset resets the state of loweringState for reuse. 
93 func (l *loweringState) reset() { 94 l.values = l.values[:0] 95 l.controlFrames = l.controlFrames[:0] 96 l.pc = 0 97 l.unreachable = false 98 l.unreachableDepth = 0 99 } 100 101 func (l *loweringState) peek() (ret ssa.Value) { 102 tail := len(l.values) - 1 103 return l.values[tail] 104 } 105 106 func (l *loweringState) pop() (ret ssa.Value) { 107 tail := len(l.values) - 1 108 ret = l.values[tail] 109 l.values = l.values[:tail] 110 return 111 } 112 113 func (l *loweringState) push(ret ssa.Value) { 114 l.values = append(l.values, ret) 115 } 116 117 func (l *loweringState) nPopInto(n int, dst []ssa.Value) { 118 if n == 0 { 119 return 120 } 121 tail := len(l.values) 122 begin := tail - n 123 view := l.values[begin:tail] 124 copy(dst, view) 125 l.values = l.values[:begin] 126 } 127 128 func (l *loweringState) nPeekDup(n int) []ssa.Value { 129 if n == 0 { 130 return nil 131 } 132 tail := len(l.values) 133 view := l.values[tail-n : tail] 134 return cloneValuesList(view) 135 } 136 137 func (l *loweringState) ctrlPop() (ret controlFrame) { 138 tail := len(l.controlFrames) - 1 139 ret = l.controlFrames[tail] 140 l.controlFrames = l.controlFrames[:tail] 141 return 142 } 143 144 func (l *loweringState) ctrlPush(ret controlFrame) { 145 l.controlFrames = append(l.controlFrames, ret) 146 } 147 148 func (l *loweringState) ctrlPeekAt(n int) (ret *controlFrame) { 149 tail := len(l.controlFrames) - 1 150 return &l.controlFrames[tail-n] 151 } 152 153 // lowerBody lowers the body of the Wasm function to the SSA form. 154 func (c *Compiler) lowerBody(entryBlk ssa.BasicBlock) { 155 c.ssaBuilder.Seal(entryBlk) 156 157 if c.needListener { 158 c.callListenerBefore() 159 } 160 161 // Pushes the empty control frame which corresponds to the function return. 
	c.loweringState.ctrlPush(controlFrame{
		kind:           controlFrameKindFunction,
		blockType:      c.wasmFunctionTyp,
		followingBlock: c.ssaBuilder.ReturnBlock(),
	})

	// Lower opcodes one by one until the entire body is consumed.
	for c.loweringState.pc < len(c.wasmFunctionBody) {
		c.lowerCurrentOpcode()
	}
}

// state returns the mutable lowering state owned by this Compiler.
func (c *Compiler) state() *loweringState {
	return &c.loweringState
}

// lowerCurrentOpcode lowers the single opcode at the current pc into SSA,
// advancing pc past the opcode and its immediates.
func (c *Compiler) lowerCurrentOpcode() {
	op := c.wasmFunctionBody[c.loweringState.pc]

	if c.needSourceOffsetInfo {
		// Record the absolute offset in the code section for debug info.
		c.ssaBuilder.SetCurrentSourceOffset(
			ssa.SourceOffset(c.loweringState.pc) + ssa.SourceOffset(c.wasmFunctionBodyOffsetInCodeSection),
		)
	}

	builder := c.ssaBuilder
	state := c.state()
	switch op {
	case wasm.OpcodeI32Const:
		// Immediates are always decoded (to advance pc) even when unreachable.
		c := c.readI32s()
		if state.unreachable {
			break
		}

		iconst := builder.AllocateInstruction().AsIconst32(uint32(c)).Insert(builder)
		value := iconst.Return()
		state.push(value)
	case wasm.OpcodeI64Const:
		c := c.readI64s()
		if state.unreachable {
			break
		}
		iconst := builder.AllocateInstruction().AsIconst64(uint64(c)).Insert(builder)
		value := iconst.Return()
		state.push(value)
	case wasm.OpcodeF32Const:
		f32 := c.readF32()
		if state.unreachable {
			break
		}
		f32const := builder.AllocateInstruction().
			AsF32const(f32).
			Insert(builder).
			Return()
		state.push(f32const)
	case wasm.OpcodeF64Const:
		f64 := c.readF64()
		if state.unreachable {
			break
		}
		f64const := builder.AllocateInstruction().
			AsF64const(f64).
			Insert(builder).
224 Return() 225 state.push(f64const) 226 case wasm.OpcodeI32Add, wasm.OpcodeI64Add: 227 if state.unreachable { 228 break 229 } 230 y, x := state.pop(), state.pop() 231 iadd := builder.AllocateInstruction() 232 iadd.AsIadd(x, y) 233 builder.InsertInstruction(iadd) 234 value := iadd.Return() 235 state.push(value) 236 case wasm.OpcodeI32Sub, wasm.OpcodeI64Sub: 237 if state.unreachable { 238 break 239 } 240 y, x := state.pop(), state.pop() 241 isub := builder.AllocateInstruction() 242 isub.AsIsub(x, y) 243 builder.InsertInstruction(isub) 244 value := isub.Return() 245 state.push(value) 246 case wasm.OpcodeF32Add, wasm.OpcodeF64Add: 247 if state.unreachable { 248 break 249 } 250 y, x := state.pop(), state.pop() 251 iadd := builder.AllocateInstruction() 252 iadd.AsFadd(x, y) 253 builder.InsertInstruction(iadd) 254 value := iadd.Return() 255 state.push(value) 256 case wasm.OpcodeI32Mul, wasm.OpcodeI64Mul: 257 if state.unreachable { 258 break 259 } 260 y, x := state.pop(), state.pop() 261 imul := builder.AllocateInstruction() 262 imul.AsImul(x, y) 263 builder.InsertInstruction(imul) 264 value := imul.Return() 265 state.push(value) 266 case wasm.OpcodeF32Sub, wasm.OpcodeF64Sub: 267 if state.unreachable { 268 break 269 } 270 y, x := state.pop(), state.pop() 271 isub := builder.AllocateInstruction() 272 isub.AsFsub(x, y) 273 builder.InsertInstruction(isub) 274 value := isub.Return() 275 state.push(value) 276 case wasm.OpcodeF32Mul, wasm.OpcodeF64Mul: 277 if state.unreachable { 278 break 279 } 280 y, x := state.pop(), state.pop() 281 isub := builder.AllocateInstruction() 282 isub.AsFmul(x, y) 283 builder.InsertInstruction(isub) 284 value := isub.Return() 285 state.push(value) 286 case wasm.OpcodeF32Div, wasm.OpcodeF64Div: 287 if state.unreachable { 288 break 289 } 290 y, x := state.pop(), state.pop() 291 isub := builder.AllocateInstruction() 292 isub.AsFdiv(x, y) 293 builder.InsertInstruction(isub) 294 value := isub.Return() 295 state.push(value) 296 case wasm.OpcodeF32Max, 
wasm.OpcodeF64Max: 297 if state.unreachable { 298 break 299 } 300 y, x := state.pop(), state.pop() 301 isub := builder.AllocateInstruction() 302 isub.AsFmax(x, y) 303 builder.InsertInstruction(isub) 304 value := isub.Return() 305 state.push(value) 306 case wasm.OpcodeF32Min, wasm.OpcodeF64Min: 307 if state.unreachable { 308 break 309 } 310 y, x := state.pop(), state.pop() 311 isub := builder.AllocateInstruction() 312 isub.AsFmin(x, y) 313 builder.InsertInstruction(isub) 314 value := isub.Return() 315 state.push(value) 316 case wasm.OpcodeI64Extend8S: 317 if state.unreachable { 318 break 319 } 320 c.insertIntegerExtend(true, 8, 64) 321 case wasm.OpcodeI64Extend16S: 322 if state.unreachable { 323 break 324 } 325 c.insertIntegerExtend(true, 16, 64) 326 case wasm.OpcodeI64Extend32S, wasm.OpcodeI64ExtendI32S: 327 if state.unreachable { 328 break 329 } 330 c.insertIntegerExtend(true, 32, 64) 331 case wasm.OpcodeI64ExtendI32U: 332 if state.unreachable { 333 break 334 } 335 c.insertIntegerExtend(false, 32, 64) 336 case wasm.OpcodeI32Extend8S: 337 if state.unreachable { 338 break 339 } 340 c.insertIntegerExtend(true, 8, 32) 341 case wasm.OpcodeI32Extend16S: 342 if state.unreachable { 343 break 344 } 345 c.insertIntegerExtend(true, 16, 32) 346 case wasm.OpcodeI32Eqz, wasm.OpcodeI64Eqz: 347 if state.unreachable { 348 break 349 } 350 x := state.pop() 351 zero := builder.AllocateInstruction() 352 if op == wasm.OpcodeI32Eqz { 353 zero.AsIconst32(0) 354 } else { 355 zero.AsIconst64(0) 356 } 357 builder.InsertInstruction(zero) 358 icmp := builder.AllocateInstruction(). 359 AsIcmp(x, zero.Return(), ssa.IntegerCmpCondEqual). 360 Insert(builder). 
361 Return() 362 state.push(icmp) 363 case wasm.OpcodeI32Eq, wasm.OpcodeI64Eq: 364 if state.unreachable { 365 break 366 } 367 c.insertIcmp(ssa.IntegerCmpCondEqual) 368 case wasm.OpcodeI32Ne, wasm.OpcodeI64Ne: 369 if state.unreachable { 370 break 371 } 372 c.insertIcmp(ssa.IntegerCmpCondNotEqual) 373 case wasm.OpcodeI32LtS, wasm.OpcodeI64LtS: 374 if state.unreachable { 375 break 376 } 377 c.insertIcmp(ssa.IntegerCmpCondSignedLessThan) 378 case wasm.OpcodeI32LtU, wasm.OpcodeI64LtU: 379 if state.unreachable { 380 break 381 } 382 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThan) 383 case wasm.OpcodeI32GtS, wasm.OpcodeI64GtS: 384 if state.unreachable { 385 break 386 } 387 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThan) 388 case wasm.OpcodeI32GtU, wasm.OpcodeI64GtU: 389 if state.unreachable { 390 break 391 } 392 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThan) 393 case wasm.OpcodeI32LeS, wasm.OpcodeI64LeS: 394 if state.unreachable { 395 break 396 } 397 c.insertIcmp(ssa.IntegerCmpCondSignedLessThanOrEqual) 398 case wasm.OpcodeI32LeU, wasm.OpcodeI64LeU: 399 if state.unreachable { 400 break 401 } 402 c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThanOrEqual) 403 case wasm.OpcodeI32GeS, wasm.OpcodeI64GeS: 404 if state.unreachable { 405 break 406 } 407 c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThanOrEqual) 408 case wasm.OpcodeI32GeU, wasm.OpcodeI64GeU: 409 if state.unreachable { 410 break 411 } 412 c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThanOrEqual) 413 414 case wasm.OpcodeF32Eq, wasm.OpcodeF64Eq: 415 if state.unreachable { 416 break 417 } 418 c.insertFcmp(ssa.FloatCmpCondEqual) 419 case wasm.OpcodeF32Ne, wasm.OpcodeF64Ne: 420 if state.unreachable { 421 break 422 } 423 c.insertFcmp(ssa.FloatCmpCondNotEqual) 424 case wasm.OpcodeF32Lt, wasm.OpcodeF64Lt: 425 if state.unreachable { 426 break 427 } 428 c.insertFcmp(ssa.FloatCmpCondLessThan) 429 case wasm.OpcodeF32Gt, wasm.OpcodeF64Gt: 430 if state.unreachable { 431 break 432 } 433 
c.insertFcmp(ssa.FloatCmpCondGreaterThan) 434 case wasm.OpcodeF32Le, wasm.OpcodeF64Le: 435 if state.unreachable { 436 break 437 } 438 c.insertFcmp(ssa.FloatCmpCondLessThanOrEqual) 439 case wasm.OpcodeF32Ge, wasm.OpcodeF64Ge: 440 if state.unreachable { 441 break 442 } 443 c.insertFcmp(ssa.FloatCmpCondGreaterThanOrEqual) 444 case wasm.OpcodeF32Neg, wasm.OpcodeF64Neg: 445 if state.unreachable { 446 break 447 } 448 x := state.pop() 449 v := builder.AllocateInstruction().AsFneg(x).Insert(builder).Return() 450 state.push(v) 451 case wasm.OpcodeF32Sqrt, wasm.OpcodeF64Sqrt: 452 if state.unreachable { 453 break 454 } 455 x := state.pop() 456 v := builder.AllocateInstruction().AsSqrt(x).Insert(builder).Return() 457 state.push(v) 458 case wasm.OpcodeF32Abs, wasm.OpcodeF64Abs: 459 if state.unreachable { 460 break 461 } 462 x := state.pop() 463 v := builder.AllocateInstruction().AsFabs(x).Insert(builder).Return() 464 state.push(v) 465 case wasm.OpcodeF32Copysign, wasm.OpcodeF64Copysign: 466 if state.unreachable { 467 break 468 } 469 y, x := state.pop(), state.pop() 470 v := builder.AllocateInstruction().AsFcopysign(x, y).Insert(builder).Return() 471 state.push(v) 472 473 case wasm.OpcodeF32Ceil, wasm.OpcodeF64Ceil: 474 if state.unreachable { 475 break 476 } 477 x := state.pop() 478 v := builder.AllocateInstruction().AsCeil(x).Insert(builder).Return() 479 state.push(v) 480 case wasm.OpcodeF32Floor, wasm.OpcodeF64Floor: 481 if state.unreachable { 482 break 483 } 484 x := state.pop() 485 v := builder.AllocateInstruction().AsFloor(x).Insert(builder).Return() 486 state.push(v) 487 case wasm.OpcodeF32Trunc, wasm.OpcodeF64Trunc: 488 if state.unreachable { 489 break 490 } 491 x := state.pop() 492 v := builder.AllocateInstruction().AsTrunc(x).Insert(builder).Return() 493 state.push(v) 494 case wasm.OpcodeF32Nearest, wasm.OpcodeF64Nearest: 495 if state.unreachable { 496 break 497 } 498 x := state.pop() 499 v := builder.AllocateInstruction().AsNearest(x).Insert(builder).Return() 500 
state.push(v) 501 case wasm.OpcodeI64TruncF64S, wasm.OpcodeI64TruncF32S, 502 wasm.OpcodeI32TruncF64S, wasm.OpcodeI32TruncF32S, 503 wasm.OpcodeI64TruncF64U, wasm.OpcodeI64TruncF32U, 504 wasm.OpcodeI32TruncF64U, wasm.OpcodeI32TruncF32U: 505 if state.unreachable { 506 break 507 } 508 ret := builder.AllocateInstruction().AsFcvtToInt( 509 state.pop(), 510 c.execCtxPtrValue, 511 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI32TruncF32S || op == wasm.OpcodeI32TruncF64S, 512 op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI64TruncF64U || op == wasm.OpcodeI64TruncF32U, 513 false, 514 ).Insert(builder).Return() 515 state.push(ret) 516 case wasm.OpcodeMiscPrefix: 517 state.pc++ 518 // A misc opcode is encoded as an unsigned variable 32-bit integer. 519 miscOpUint, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc:]) 520 if err != nil { 521 // In normal conditions this should never happen because the function has passed validation. 
522 panic(fmt.Sprintf("failed to read misc opcode: %v", err)) 523 } 524 state.pc += int(num - 1) 525 miscOp := wasm.OpcodeMisc(miscOpUint) 526 switch miscOp { 527 case wasm.OpcodeMiscI64TruncSatF64S, wasm.OpcodeMiscI64TruncSatF32S, 528 wasm.OpcodeMiscI32TruncSatF64S, wasm.OpcodeMiscI32TruncSatF32S, 529 wasm.OpcodeMiscI64TruncSatF64U, wasm.OpcodeMiscI64TruncSatF32U, 530 wasm.OpcodeMiscI32TruncSatF64U, wasm.OpcodeMiscI32TruncSatF32U: 531 if state.unreachable { 532 break 533 } 534 ret := builder.AllocateInstruction().AsFcvtToInt( 535 state.pop(), 536 c.execCtxPtrValue, 537 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF64S, 538 miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI64TruncSatF64U || miscOp == wasm.OpcodeMiscI64TruncSatF32U, 539 true, 540 ).Insert(builder).Return() 541 state.push(ret) 542 543 case wasm.OpcodeMiscTableSize: 544 tableIndex := c.readI32u() 545 if state.unreachable { 546 break 547 } 548 549 // Load the table. 550 loadTableInstancePtr := builder.AllocateInstruction() 551 loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64) 552 builder.InsertInstruction(loadTableInstancePtr) 553 tableInstancePtr := loadTableInstancePtr.Return() 554 555 // Load the table's length. 556 loadTableLen := builder.AllocateInstruction(). 557 AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32). 558 Insert(builder) 559 state.push(loadTableLen.Return()) 560 561 case wasm.OpcodeMiscTableGrow: 562 tableIndex := c.readI32u() 563 if state.unreachable { 564 break 565 } 566 567 c.storeCallerModuleContext() 568 569 tableIndexVal := builder.AllocateInstruction().AsIconst32(tableIndex).Insert(builder).Return() 570 571 num := state.pop() 572 r := state.pop() 573 574 tableGrowPtr := builder.AllocateInstruction(). 
575 AsLoad(c.execCtxPtrValue, 576 wazevoapi.ExecutionContextOffsetTableGrowTrampolineAddress.U32(), 577 ssa.TypeI64, 578 ).Insert(builder).Return() 579 580 // TODO: reuse the slice. 581 args := []ssa.Value{c.execCtxPtrValue, tableIndexVal, num, r} 582 callGrowRet := builder. 583 AllocateInstruction(). 584 AsCallIndirect(tableGrowPtr, &c.tableGrowSig, args). 585 Insert(builder).Return() 586 state.push(callGrowRet) 587 588 case wasm.OpcodeMiscTableCopy: 589 dstTableIndex := c.readI32u() 590 srcTableIndex := c.readI32u() 591 if state.unreachable { 592 break 593 } 594 595 copySize := builder. 596 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 597 srcOffset := builder. 598 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 599 dstOffset := builder. 600 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 601 602 // Out of bounds check. 603 dstTableInstancePtr := c.boundsCheckInTable(dstTableIndex, dstOffset, copySize) 604 srcTableInstancePtr := c.boundsCheckInTable(srcTableIndex, srcOffset, copySize) 605 606 dstTableBaseAddr := c.loadTableBaseAddr(dstTableInstancePtr) 607 srcTableBaseAddr := c.loadTableBaseAddr(srcTableInstancePtr) 608 609 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 610 611 dstOffsetInBytes := builder.AllocateInstruction().AsIshl(dstOffset, three).Insert(builder).Return() 612 dstAddr := builder.AllocateInstruction().AsIadd(dstTableBaseAddr, dstOffsetInBytes).Insert(builder).Return() 613 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(srcOffset, three).Insert(builder).Return() 614 srcAddr := builder.AllocateInstruction().AsIadd(srcTableBaseAddr, srcOffsetInBytes).Insert(builder).Return() 615 616 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 617 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 618 619 case wasm.OpcodeMiscMemoryCopy: 620 state.pc += 2 // +2 to skip two memory 
indexes which are fixed to zero. 621 if state.unreachable { 622 break 623 } 624 625 copySize := builder. 626 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 627 srcOffset := builder. 628 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 629 dstOffset := builder. 630 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 631 632 // Out of bounds check. 633 memLen := c.getMemoryLenValue(false) 634 c.boundsCheckInMemory(memLen, dstOffset, copySize) 635 c.boundsCheckInMemory(memLen, srcOffset, copySize) 636 637 memBase := c.getMemoryBaseValue(false) 638 dstAddr := builder.AllocateInstruction().AsIadd(memBase, dstOffset).Insert(builder).Return() 639 srcAddr := builder.AllocateInstruction().AsIadd(memBase, srcOffset).Insert(builder).Return() 640 641 c.callMemmove(dstAddr, srcAddr, copySize) 642 643 case wasm.OpcodeMiscTableFill: 644 tableIndex := c.readI32u() 645 if state.unreachable { 646 break 647 } 648 fillSize := state.pop() 649 value := state.pop() 650 offset := state.pop() 651 652 fillSizeExt := builder. 653 AllocateInstruction().AsUExtend(fillSize, 32, 64).Insert(builder).Return() 654 offsetExt := builder. 655 AllocateInstruction().AsUExtend(offset, 32, 64).Insert(builder).Return() 656 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetExt, fillSizeExt) 657 658 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 659 offsetInBytes := builder.AllocateInstruction().AsIshl(offsetExt, three).Insert(builder).Return() 660 fillSizeInBytes := builder.AllocateInstruction().AsIshl(fillSizeExt, three).Insert(builder).Return() 661 662 // Calculate the base address of the table. 663 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 664 addr := builder.AllocateInstruction().AsIadd(tableBaseAddr, offsetInBytes).Insert(builder).Return() 665 666 // Prepare the loop and following block. 
667 beforeLoop := builder.AllocateBasicBlock() 668 loopBlk := builder.AllocateBasicBlock() 669 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 670 followingBlk := builder.AllocateBasicBlock() 671 672 // Uses the copy trick for faster filling buffer like memory.fill, but in this case we copy 8 bytes at a time. 673 // buf := memoryInst.Buffer[offset : offset+fillSize] 674 // buf[0:8] = value 675 // for i := 8; i < fillSize; i *= 2 { Begin with 8 bytes. 676 // copy(buf[i:], buf[:i]) 677 // } 678 679 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 680 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 681 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSizeExt, zero, ssa.IntegerCmpCondEqual). 682 Insert(builder).Return() 683 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, nil, followingBlk).Insert(builder) 684 c.insertJumpToBlock(nil, beforeLoop) 685 686 // buf[0:8] = value 687 builder.SetCurrentBlock(beforeLoop) 688 builder.AllocateInstruction().AsStore(ssa.OpcodeStore, value, addr, 0).Insert(builder) 689 initValue := builder.AllocateInstruction().AsIconst64(8).Insert(builder).Return() 690 c.insertJumpToBlock([]ssa.Value{initValue}, loopBlk) // TODO: reuse the slice. 691 692 builder.SetCurrentBlock(loopBlk) 693 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 694 695 // If loopVar*2 > fillSizeInBytes, then count must be fillSizeInBytes-loopVar. 696 var count ssa.Value 697 { 698 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 699 loopVarDoubledLargerThanFillSize := builder. 700 AllocateInstruction().AsIcmp(loopVarDoubled, fillSizeInBytes, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 
701 Insert(builder).Return() 702 diff := builder.AllocateInstruction().AsIsub(fillSizeInBytes, loopVar).Insert(builder).Return() 703 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 704 } 705 706 c.callMemmove(dstAddr, addr, count) 707 708 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 709 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 710 loopVarLessThanFillSize := builder.AllocateInstruction(). 711 AsIcmp(newLoopVar, fillSizeInBytes, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 712 713 builder.AllocateInstruction(). 714 AsBrnz(loopVarLessThanFillSize, []ssa.Value{newLoopVar}, loopBlk). // TODO: reuse the slice. 715 Insert(builder) 716 717 c.insertJumpToBlock(nil, followingBlk) 718 builder.SetCurrentBlock(followingBlk) 719 720 builder.Seal(beforeLoop) 721 builder.Seal(loopBlk) 722 builder.Seal(followingBlk) 723 724 case wasm.OpcodeMiscMemoryFill: 725 state.pc++ // Skip the memory index which is fixed to zero. 726 if state.unreachable { 727 break 728 } 729 730 fillSize := builder. 731 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 732 value := state.pop() 733 offset := builder. 734 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 735 736 // Out of bounds check. 737 c.boundsCheckInMemory(c.getMemoryLenValue(false), offset, fillSize) 738 739 // Calculate the base address: 740 addr := builder.AllocateInstruction().AsIadd(c.getMemoryBaseValue(false), offset).Insert(builder).Return() 741 742 // Uses the copy trick for faster filling buffer: https://gist.github.com/taylorza/df2f89d5f9ab3ffd06865062a4cf015d 743 // buf := memoryInst.Buffer[offset : offset+fillSize] 744 // buf[0] = value 745 // for i := 1; i < fillSize; i *= 2 { 746 // copy(buf[i:], buf[:i]) 747 // } 748 749 // Prepare the loop and following block. 
750 beforeLoop := builder.AllocateBasicBlock() 751 loopBlk := builder.AllocateBasicBlock() 752 loopVar := loopBlk.AddParam(builder, ssa.TypeI64) 753 followingBlk := builder.AllocateBasicBlock() 754 755 // Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics. 756 zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return() 757 ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSize, zero, ssa.IntegerCmpCondEqual). 758 Insert(builder).Return() 759 builder.AllocateInstruction().AsBrnz(ifFillSizeZero, nil, followingBlk).Insert(builder) 760 c.insertJumpToBlock(nil, beforeLoop) 761 762 // buf[0] = value 763 builder.SetCurrentBlock(beforeLoop) 764 builder.AllocateInstruction().AsStore(ssa.OpcodeIstore8, value, addr, 0).Insert(builder) 765 initValue := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 766 c.insertJumpToBlock([]ssa.Value{initValue}, loopBlk) // TODO: reuse the slice. 767 768 builder.SetCurrentBlock(loopBlk) 769 dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return() 770 771 // If loopVar*2 > fillSizeExt, then count must be fillSizeExt-loopVar. 772 var count ssa.Value 773 { 774 loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return() 775 loopVarDoubledLargerThanFillSize := builder. 776 AllocateInstruction().AsIcmp(loopVarDoubled, fillSize, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual). 
777 Insert(builder).Return() 778 diff := builder.AllocateInstruction().AsIsub(fillSize, loopVar).Insert(builder).Return() 779 count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return() 780 } 781 782 c.callMemmove(dstAddr, addr, count) 783 784 shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return() 785 newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return() 786 loopVarLessThanFillSize := builder.AllocateInstruction(). 787 AsIcmp(newLoopVar, fillSize, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return() 788 789 builder.AllocateInstruction(). 790 AsBrnz(loopVarLessThanFillSize, []ssa.Value{newLoopVar}, loopBlk). // TODO: reuse the slice. 791 Insert(builder) 792 793 c.insertJumpToBlock(nil, followingBlk) 794 builder.SetCurrentBlock(followingBlk) 795 796 builder.Seal(beforeLoop) 797 builder.Seal(loopBlk) 798 builder.Seal(followingBlk) 799 800 case wasm.OpcodeMiscMemoryInit: 801 index := c.readI32u() 802 state.pc++ // Skip the memory index which is fixed to zero. 803 if state.unreachable { 804 break 805 } 806 807 copySize := builder. 808 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 809 offsetInDataInstance := builder. 810 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 811 offsetInMemory := builder. 812 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 813 814 dataInstPtr := c.dataOrElementInstanceAddr(index, c.offset.DataInstances1stElement) 815 816 // Bounds check. 
817 c.boundsCheckInMemory(c.getMemoryLenValue(false), offsetInMemory, copySize) 818 c.boundsCheckInDataOrElementInstance(dataInstPtr, offsetInDataInstance, copySize, wazevoapi.ExitCodeMemoryOutOfBounds) 819 820 dataInstBaseAddr := builder.AllocateInstruction().AsLoad(dataInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 821 srcAddr := builder.AllocateInstruction().AsIadd(dataInstBaseAddr, offsetInDataInstance).Insert(builder).Return() 822 823 memBase := c.getMemoryBaseValue(false) 824 dstAddr := builder.AllocateInstruction().AsIadd(memBase, offsetInMemory).Insert(builder).Return() 825 826 c.callMemmove(dstAddr, srcAddr, copySize) 827 828 case wasm.OpcodeMiscTableInit: 829 elemIndex := c.readI32u() 830 tableIndex := c.readI32u() 831 if state.unreachable { 832 break 833 } 834 835 copySize := builder. 836 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 837 offsetInElementInstance := builder. 838 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 839 offsetInTable := builder. 840 AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return() 841 842 elemInstPtr := c.dataOrElementInstanceAddr(elemIndex, c.offset.ElementInstances1stElement) 843 844 // Bounds check. 845 tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetInTable, copySize) 846 c.boundsCheckInDataOrElementInstance(elemInstPtr, offsetInElementInstance, copySize, wazevoapi.ExitCodeTableOutOfBounds) 847 848 three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return() 849 // Calculates the destination address in the table. 850 tableOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInTable, three).Insert(builder).Return() 851 tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr) 852 dstAddr := builder.AllocateInstruction().AsIadd(tableBaseAddr, tableOffsetInBytes).Insert(builder).Return() 853 854 // Calculates the source address in the element instance. 
855 srcOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInElementInstance, three).Insert(builder).Return() 856 elemInstBaseAddr := builder.AllocateInstruction().AsLoad(elemInstPtr, 0, ssa.TypeI64).Insert(builder).Return() 857 srcAddr := builder.AllocateInstruction().AsIadd(elemInstBaseAddr, srcOffsetInBytes).Insert(builder).Return() 858 859 copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return() 860 c.callMemmove(dstAddr, srcAddr, copySizeInBytes) 861 862 case wasm.OpcodeMiscElemDrop: 863 index := c.readI32u() 864 if state.unreachable { 865 break 866 } 867 868 c.dropDataOrElementInstance(index, c.offset.ElementInstances1stElement) 869 870 case wasm.OpcodeMiscDataDrop: 871 index := c.readI32u() 872 if state.unreachable { 873 break 874 } 875 c.dropDataOrElementInstance(index, c.offset.DataInstances1stElement) 876 877 default: 878 panic("Unknown MiscOp " + wasm.MiscInstructionName(miscOp)) 879 } 880 881 case wasm.OpcodeI32ReinterpretF32: 882 if state.unreachable { 883 break 884 } 885 reinterpret := builder.AllocateInstruction(). 886 AsBitcast(state.pop(), ssa.TypeI32). 887 Insert(builder).Return() 888 state.push(reinterpret) 889 890 case wasm.OpcodeI64ReinterpretF64: 891 if state.unreachable { 892 break 893 } 894 reinterpret := builder.AllocateInstruction(). 895 AsBitcast(state.pop(), ssa.TypeI64). 896 Insert(builder).Return() 897 state.push(reinterpret) 898 899 case wasm.OpcodeF32ReinterpretI32: 900 if state.unreachable { 901 break 902 } 903 reinterpret := builder.AllocateInstruction(). 904 AsBitcast(state.pop(), ssa.TypeF32). 905 Insert(builder).Return() 906 state.push(reinterpret) 907 908 case wasm.OpcodeF64ReinterpretI64: 909 if state.unreachable { 910 break 911 } 912 reinterpret := builder.AllocateInstruction(). 913 AsBitcast(state.pop(), ssa.TypeF64). 
914 Insert(builder).Return() 915 state.push(reinterpret) 916 917 case wasm.OpcodeI32DivS, wasm.OpcodeI64DivS: 918 if state.unreachable { 919 break 920 } 921 y, x := state.pop(), state.pop() 922 result := builder.AllocateInstruction().AsSDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 923 state.push(result) 924 925 case wasm.OpcodeI32DivU, wasm.OpcodeI64DivU: 926 if state.unreachable { 927 break 928 } 929 y, x := state.pop(), state.pop() 930 result := builder.AllocateInstruction().AsUDiv(x, y, c.execCtxPtrValue).Insert(builder).Return() 931 state.push(result) 932 933 case wasm.OpcodeI32RemS, wasm.OpcodeI64RemS: 934 if state.unreachable { 935 break 936 } 937 y, x := state.pop(), state.pop() 938 result := builder.AllocateInstruction().AsSRem(x, y, c.execCtxPtrValue).Insert(builder).Return() 939 state.push(result) 940 941 case wasm.OpcodeI32RemU, wasm.OpcodeI64RemU: 942 if state.unreachable { 943 break 944 } 945 y, x := state.pop(), state.pop() 946 result := builder.AllocateInstruction().AsURem(x, y, c.execCtxPtrValue).Insert(builder).Return() 947 state.push(result) 948 949 case wasm.OpcodeI32And, wasm.OpcodeI64And: 950 if state.unreachable { 951 break 952 } 953 y, x := state.pop(), state.pop() 954 and := builder.AllocateInstruction() 955 and.AsBand(x, y) 956 builder.InsertInstruction(and) 957 value := and.Return() 958 state.push(value) 959 case wasm.OpcodeI32Or, wasm.OpcodeI64Or: 960 if state.unreachable { 961 break 962 } 963 y, x := state.pop(), state.pop() 964 or := builder.AllocateInstruction() 965 or.AsBor(x, y) 966 builder.InsertInstruction(or) 967 value := or.Return() 968 state.push(value) 969 case wasm.OpcodeI32Xor, wasm.OpcodeI64Xor: 970 if state.unreachable { 971 break 972 } 973 y, x := state.pop(), state.pop() 974 xor := builder.AllocateInstruction() 975 xor.AsBxor(x, y) 976 builder.InsertInstruction(xor) 977 value := xor.Return() 978 state.push(value) 979 case wasm.OpcodeI32Shl, wasm.OpcodeI64Shl: 980 if state.unreachable { 981 break 982 } 983 y, x := 
state.pop(), state.pop() 984 ishl := builder.AllocateInstruction() 985 ishl.AsIshl(x, y) 986 builder.InsertInstruction(ishl) 987 value := ishl.Return() 988 state.push(value) 989 case wasm.OpcodeI32ShrU, wasm.OpcodeI64ShrU: 990 if state.unreachable { 991 break 992 } 993 y, x := state.pop(), state.pop() 994 ishl := builder.AllocateInstruction() 995 ishl.AsUshr(x, y) 996 builder.InsertInstruction(ishl) 997 value := ishl.Return() 998 state.push(value) 999 case wasm.OpcodeI32ShrS, wasm.OpcodeI64ShrS: 1000 if state.unreachable { 1001 break 1002 } 1003 y, x := state.pop(), state.pop() 1004 ishl := builder.AllocateInstruction() 1005 ishl.AsSshr(x, y) 1006 builder.InsertInstruction(ishl) 1007 value := ishl.Return() 1008 state.push(value) 1009 case wasm.OpcodeI32Rotl, wasm.OpcodeI64Rotl: 1010 if state.unreachable { 1011 break 1012 } 1013 y, x := state.pop(), state.pop() 1014 rotl := builder.AllocateInstruction() 1015 rotl.AsRotl(x, y) 1016 builder.InsertInstruction(rotl) 1017 value := rotl.Return() 1018 state.push(value) 1019 case wasm.OpcodeI32Rotr, wasm.OpcodeI64Rotr: 1020 if state.unreachable { 1021 break 1022 } 1023 y, x := state.pop(), state.pop() 1024 rotr := builder.AllocateInstruction() 1025 rotr.AsRotr(x, y) 1026 builder.InsertInstruction(rotr) 1027 value := rotr.Return() 1028 state.push(value) 1029 case wasm.OpcodeI32Clz, wasm.OpcodeI64Clz: 1030 if state.unreachable { 1031 break 1032 } 1033 x := state.pop() 1034 clz := builder.AllocateInstruction() 1035 clz.AsClz(x) 1036 builder.InsertInstruction(clz) 1037 value := clz.Return() 1038 state.push(value) 1039 case wasm.OpcodeI32Ctz, wasm.OpcodeI64Ctz: 1040 if state.unreachable { 1041 break 1042 } 1043 x := state.pop() 1044 ctz := builder.AllocateInstruction() 1045 ctz.AsCtz(x) 1046 builder.InsertInstruction(ctz) 1047 value := ctz.Return() 1048 state.push(value) 1049 case wasm.OpcodeI32Popcnt, wasm.OpcodeI64Popcnt: 1050 if state.unreachable { 1051 break 1052 } 1053 x := state.pop() 1054 popcnt := 
builder.AllocateInstruction() 1055 popcnt.AsPopcnt(x) 1056 builder.InsertInstruction(popcnt) 1057 value := popcnt.Return() 1058 state.push(value) 1059 1060 case wasm.OpcodeI32WrapI64: 1061 if state.unreachable { 1062 break 1063 } 1064 x := state.pop() 1065 wrap := builder.AllocateInstruction().AsIreduce(x, ssa.TypeI32).Insert(builder).Return() 1066 state.push(wrap) 1067 case wasm.OpcodeGlobalGet: 1068 index := c.readI32u() 1069 if state.unreachable { 1070 break 1071 } 1072 v := c.getWasmGlobalValue(index, false) 1073 state.push(v) 1074 case wasm.OpcodeGlobalSet: 1075 index := c.readI32u() 1076 if state.unreachable { 1077 break 1078 } 1079 v := state.pop() 1080 c.setWasmGlobalValue(index, v) 1081 case wasm.OpcodeLocalGet: 1082 index := c.readI32u() 1083 if state.unreachable { 1084 break 1085 } 1086 variable := c.localVariable(index) 1087 v := builder.MustFindValue(variable) 1088 state.push(v) 1089 case wasm.OpcodeLocalSet: 1090 index := c.readI32u() 1091 if state.unreachable { 1092 break 1093 } 1094 variable := c.localVariable(index) 1095 newValue := state.pop() 1096 builder.DefineVariableInCurrentBB(variable, newValue) 1097 1098 case wasm.OpcodeLocalTee: 1099 index := c.readI32u() 1100 if state.unreachable { 1101 break 1102 } 1103 variable := c.localVariable(index) 1104 newValue := state.peek() 1105 builder.DefineVariableInCurrentBB(variable, newValue) 1106 1107 case wasm.OpcodeSelect, wasm.OpcodeTypedSelect: 1108 if op == wasm.OpcodeTypedSelect { 1109 state.pc += 2 // ignores the type which is only needed during validation. 1110 } 1111 1112 if state.unreachable { 1113 break 1114 } 1115 1116 cond := state.pop() 1117 v2 := state.pop() 1118 v1 := state.pop() 1119 1120 sl := builder.AllocateInstruction(). 1121 AsSelect(cond, v1, v2). 1122 Insert(builder). 1123 Return() 1124 state.push(sl) 1125 1126 case wasm.OpcodeMemorySize: 1127 state.pc++ // skips the memory index. 
1128 if state.unreachable { 1129 break 1130 } 1131 1132 var memSizeInBytes ssa.Value 1133 if c.offset.LocalMemoryBegin < 0 { 1134 memInstPtr := builder.AllocateInstruction(). 1135 AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64). 1136 Insert(builder). 1137 Return() 1138 1139 memSizeInBytes = builder.AllocateInstruction(). 1140 AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI32). 1141 Insert(builder). 1142 Return() 1143 } else { 1144 memSizeInBytes = builder.AllocateInstruction(). 1145 AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), ssa.TypeI32). 1146 Insert(builder). 1147 Return() 1148 } 1149 1150 amount := builder.AllocateInstruction() 1151 amount.AsIconst32(uint32(wasm.MemoryPageSizeInBits)) 1152 builder.InsertInstruction(amount) 1153 memSize := builder.AllocateInstruction(). 1154 AsUshr(memSizeInBytes, amount.Return()). 1155 Insert(builder). 1156 Return() 1157 state.push(memSize) 1158 1159 case wasm.OpcodeMemoryGrow: 1160 state.pc++ // skips the memory index. 1161 if state.unreachable { 1162 break 1163 } 1164 1165 c.storeCallerModuleContext() 1166 1167 pages := state.pop() 1168 memoryGrowPtr := builder.AllocateInstruction(). 1169 AsLoad(c.execCtxPtrValue, 1170 wazevoapi.ExecutionContextOffsetMemoryGrowTrampolineAddress.U32(), 1171 ssa.TypeI64, 1172 ).Insert(builder).Return() 1173 1174 // TODO: reuse the slice. 1175 args := []ssa.Value{c.execCtxPtrValue, pages} 1176 1177 callGrowRet := builder. 1178 AllocateInstruction(). 1179 AsCallIndirect(memoryGrowPtr, &c.memoryGrowSig, args). 1180 Insert(builder).Return() 1181 state.push(callGrowRet) 1182 1183 // After the memory grow, reload the cached memory base and len. 
1184 c.reloadMemoryBaseLen() 1185 1186 case wasm.OpcodeI32Store, 1187 wasm.OpcodeI64Store, 1188 wasm.OpcodeF32Store, 1189 wasm.OpcodeF64Store, 1190 wasm.OpcodeI32Store8, 1191 wasm.OpcodeI32Store16, 1192 wasm.OpcodeI64Store8, 1193 wasm.OpcodeI64Store16, 1194 wasm.OpcodeI64Store32: 1195 1196 _, offset := c.readMemArg() 1197 if state.unreachable { 1198 break 1199 } 1200 var opSize uint64 1201 var opcode ssa.Opcode 1202 switch op { 1203 case wasm.OpcodeI32Store, wasm.OpcodeF32Store: 1204 opcode = ssa.OpcodeStore 1205 opSize = 4 1206 case wasm.OpcodeI64Store, wasm.OpcodeF64Store: 1207 opcode = ssa.OpcodeStore 1208 opSize = 8 1209 case wasm.OpcodeI32Store8, wasm.OpcodeI64Store8: 1210 opcode = ssa.OpcodeIstore8 1211 opSize = 1 1212 case wasm.OpcodeI32Store16, wasm.OpcodeI64Store16: 1213 opcode = ssa.OpcodeIstore16 1214 opSize = 2 1215 case wasm.OpcodeI64Store32: 1216 opcode = ssa.OpcodeIstore32 1217 opSize = 4 1218 default: 1219 panic("BUG") 1220 } 1221 1222 value := state.pop() 1223 baseAddr := state.pop() 1224 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1225 builder.AllocateInstruction(). 1226 AsStore(opcode, value, addr, offset). 
1227 Insert(builder) 1228 1229 case wasm.OpcodeI32Load, 1230 wasm.OpcodeI64Load, 1231 wasm.OpcodeF32Load, 1232 wasm.OpcodeF64Load, 1233 wasm.OpcodeI32Load8S, 1234 wasm.OpcodeI32Load8U, 1235 wasm.OpcodeI32Load16S, 1236 wasm.OpcodeI32Load16U, 1237 wasm.OpcodeI64Load8S, 1238 wasm.OpcodeI64Load8U, 1239 wasm.OpcodeI64Load16S, 1240 wasm.OpcodeI64Load16U, 1241 wasm.OpcodeI64Load32S, 1242 wasm.OpcodeI64Load32U: 1243 _, offset := c.readMemArg() 1244 if state.unreachable { 1245 break 1246 } 1247 1248 var opSize uint64 1249 switch op { 1250 case wasm.OpcodeI32Load, wasm.OpcodeF32Load: 1251 opSize = 4 1252 case wasm.OpcodeI64Load, wasm.OpcodeF64Load: 1253 opSize = 8 1254 case wasm.OpcodeI32Load8S, wasm.OpcodeI32Load8U: 1255 opSize = 1 1256 case wasm.OpcodeI32Load16S, wasm.OpcodeI32Load16U: 1257 opSize = 2 1258 case wasm.OpcodeI64Load8S, wasm.OpcodeI64Load8U: 1259 opSize = 1 1260 case wasm.OpcodeI64Load16S, wasm.OpcodeI64Load16U: 1261 opSize = 2 1262 case wasm.OpcodeI64Load32S, wasm.OpcodeI64Load32U: 1263 opSize = 4 1264 default: 1265 panic("BUG") 1266 } 1267 1268 baseAddr := state.pop() 1269 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1270 load := builder.AllocateInstruction() 1271 switch op { 1272 case wasm.OpcodeI32Load: 1273 load.AsLoad(addr, offset, ssa.TypeI32) 1274 case wasm.OpcodeI64Load: 1275 load.AsLoad(addr, offset, ssa.TypeI64) 1276 case wasm.OpcodeF32Load: 1277 load.AsLoad(addr, offset, ssa.TypeF32) 1278 case wasm.OpcodeF64Load: 1279 load.AsLoad(addr, offset, ssa.TypeF64) 1280 case wasm.OpcodeI32Load8S: 1281 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, false) 1282 case wasm.OpcodeI32Load8U: 1283 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, false) 1284 case wasm.OpcodeI32Load16S: 1285 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, false) 1286 case wasm.OpcodeI32Load16U: 1287 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, false) 1288 case wasm.OpcodeI64Load8S: 1289 load.AsExtLoad(ssa.OpcodeSload8, addr, offset, true) 1290 case wasm.OpcodeI64Load8U: 
1291 load.AsExtLoad(ssa.OpcodeUload8, addr, offset, true) 1292 case wasm.OpcodeI64Load16S: 1293 load.AsExtLoad(ssa.OpcodeSload16, addr, offset, true) 1294 case wasm.OpcodeI64Load16U: 1295 load.AsExtLoad(ssa.OpcodeUload16, addr, offset, true) 1296 case wasm.OpcodeI64Load32S: 1297 load.AsExtLoad(ssa.OpcodeSload32, addr, offset, true) 1298 case wasm.OpcodeI64Load32U: 1299 load.AsExtLoad(ssa.OpcodeUload32, addr, offset, true) 1300 default: 1301 panic("BUG") 1302 } 1303 builder.InsertInstruction(load) 1304 state.push(load.Return()) 1305 case wasm.OpcodeBlock: 1306 // Note: we do not need to create a BB for this as that would always have only one predecessor 1307 // which is the current BB, and therefore it's always ok to merge them in any way. 1308 1309 bt := c.readBlockType() 1310 1311 if state.unreachable { 1312 state.unreachableDepth++ 1313 break 1314 } 1315 1316 followingBlk := builder.AllocateBasicBlock() 1317 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1318 1319 state.ctrlPush(controlFrame{ 1320 kind: controlFrameKindBlock, 1321 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1322 followingBlock: followingBlk, 1323 blockType: bt, 1324 }) 1325 case wasm.OpcodeLoop: 1326 bt := c.readBlockType() 1327 1328 if state.unreachable { 1329 state.unreachableDepth++ 1330 break 1331 } 1332 1333 loopHeader, afterLoopBlock := builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1334 c.addBlockParamsFromWasmTypes(bt.Params, loopHeader) 1335 c.addBlockParamsFromWasmTypes(bt.Results, afterLoopBlock) 1336 1337 originalLen := len(state.values) - len(bt.Params) 1338 state.ctrlPush(controlFrame{ 1339 originalStackLenWithoutParam: originalLen, 1340 kind: controlFrameKindLoop, 1341 blk: loopHeader, 1342 followingBlock: afterLoopBlock, 1343 blockType: bt, 1344 }) 1345 1346 var args []ssa.Value 1347 if len(bt.Params) > 0 { 1348 args = cloneValuesList(state.values[originalLen:]) 1349 } 1350 1351 // Insert the jump to the header of loop. 
1352 br := builder.AllocateInstruction() 1353 br.AsJump(args, loopHeader) 1354 builder.InsertInstruction(br) 1355 1356 c.switchTo(originalLen, loopHeader) 1357 1358 if c.ensureTermination { 1359 checkModuleExitCodePtr := builder.AllocateInstruction(). 1360 AsLoad(c.execCtxPtrValue, 1361 wazevoapi.ExecutionContextOffsetCheckModuleExitCodeTrampolineAddress.U32(), 1362 ssa.TypeI64, 1363 ).Insert(builder).Return() 1364 1365 c.checkModuleExitCodeArg[0] = c.execCtxPtrValue 1366 1367 builder.AllocateInstruction(). 1368 AsCallIndirect(checkModuleExitCodePtr, &c.checkModuleExitCodeSig, c.checkModuleExitCodeArg[:]). 1369 Insert(builder) 1370 } 1371 case wasm.OpcodeIf: 1372 bt := c.readBlockType() 1373 1374 if state.unreachable { 1375 state.unreachableDepth++ 1376 break 1377 } 1378 1379 v := state.pop() 1380 thenBlk, elseBlk, followingBlk := builder.AllocateBasicBlock(), builder.AllocateBasicBlock(), builder.AllocateBasicBlock() 1381 1382 // We do not make the Wasm-level block parameters as SSA-level block params for if-else blocks 1383 // since they won't be PHI and the definition is unique. 1384 1385 // On the other hand, the following block after if-else-end will likely have 1386 // multiple definitions (one in Then and another in Else blocks). 1387 c.addBlockParamsFromWasmTypes(bt.Results, followingBlk) 1388 1389 var args []ssa.Value 1390 if len(bt.Params) > 0 { 1391 args = cloneValuesList(state.values[len(state.values)-len(bt.Params):]) 1392 } 1393 1394 // Insert the conditional jump to the Else block. 1395 brz := builder.AllocateInstruction() 1396 brz.AsBrz(v, nil, elseBlk) 1397 builder.InsertInstruction(brz) 1398 1399 // Then, insert the jump to the Then block. 
1400 br := builder.AllocateInstruction() 1401 br.AsJump(nil, thenBlk) 1402 builder.InsertInstruction(br) 1403 1404 state.ctrlPush(controlFrame{ 1405 kind: controlFrameKindIfWithoutElse, 1406 originalStackLenWithoutParam: len(state.values) - len(bt.Params), 1407 blk: elseBlk, 1408 followingBlock: followingBlk, 1409 blockType: bt, 1410 clonedArgs: args, 1411 }) 1412 1413 builder.SetCurrentBlock(thenBlk) 1414 1415 // Then and Else (if exists) have only one predecessor. 1416 builder.Seal(thenBlk) 1417 builder.Seal(elseBlk) 1418 case wasm.OpcodeElse: 1419 ifctrl := state.ctrlPeekAt(0) 1420 if unreachable := state.unreachable; unreachable && state.unreachableDepth > 0 { 1421 // If it is currently in unreachable and is a nested if, 1422 // we just remove the entire else block. 1423 break 1424 } 1425 1426 ifctrl.kind = controlFrameKindIfWithElse 1427 if !state.unreachable { 1428 // If this Then block is currently reachable, we have to insert the branching to the following BB. 1429 followingBlk := ifctrl.followingBlock // == the BB after if-then-else. 1430 args := c.loweringState.nPeekDup(len(ifctrl.blockType.Results)) 1431 c.insertJumpToBlock(args, followingBlk) 1432 } else { 1433 state.unreachable = false 1434 } 1435 1436 // Reset the stack so that we can correctly handle the else block. 1437 state.values = state.values[:ifctrl.originalStackLenWithoutParam] 1438 elseBlk := ifctrl.blk 1439 for _, arg := range ifctrl.clonedArgs { 1440 state.push(arg) 1441 } 1442 1443 builder.SetCurrentBlock(elseBlk) 1444 1445 case wasm.OpcodeEnd: 1446 if state.unreachableDepth > 0 { 1447 state.unreachableDepth-- 1448 break 1449 } 1450 1451 ctrl := state.ctrlPop() 1452 followingBlk := ctrl.followingBlock 1453 1454 unreachable := state.unreachable 1455 if !unreachable { 1456 // Top n-th args will be used as a result of the current control frame. 1457 args := c.loweringState.nPeekDup(len(ctrl.blockType.Results)) 1458 1459 // Insert the unconditional branch to the target. 
1460 c.insertJumpToBlock(args, followingBlk) 1461 } else { // recover from the unreachable state. 1462 state.unreachable = false 1463 } 1464 1465 switch ctrl.kind { 1466 case controlFrameKindFunction: 1467 break // This is the very end of function. 1468 case controlFrameKindLoop: 1469 // Loop header block can be reached from any br/br_table contained in the loop, 1470 // so now that we've reached End of it, we can seal it. 1471 builder.Seal(ctrl.blk) 1472 case controlFrameKindIfWithoutElse: 1473 // If this is the end of Then block, we have to emit the empty Else block. 1474 elseBlk := ctrl.blk 1475 builder.SetCurrentBlock(elseBlk) 1476 c.insertJumpToBlock(ctrl.clonedArgs, followingBlk) 1477 } 1478 1479 builder.Seal(ctrl.followingBlock) 1480 1481 // Ready to start translating the following block. 1482 c.switchTo(ctrl.originalStackLenWithoutParam, followingBlk) 1483 1484 case wasm.OpcodeBr: 1485 labelIndex := c.readI32u() 1486 if state.unreachable { 1487 break 1488 } 1489 1490 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1491 args := c.loweringState.nPeekDup(argNum) 1492 c.insertJumpToBlock(args, targetBlk) 1493 1494 state.unreachable = true 1495 1496 case wasm.OpcodeBrIf: 1497 labelIndex := c.readI32u() 1498 if state.unreachable { 1499 break 1500 } 1501 1502 v := state.pop() 1503 1504 targetBlk, argNum := state.brTargetArgNumFor(labelIndex) 1505 args := c.loweringState.nPeekDup(argNum) 1506 1507 // Insert the conditional jump to the target block. 1508 brnz := builder.AllocateInstruction() 1509 brnz.AsBrnz(v, args, targetBlk) 1510 builder.InsertInstruction(brnz) 1511 1512 // Insert the unconditional jump to the Else block which corresponds to after br_if. 1513 elseBlk := builder.AllocateBasicBlock() 1514 c.insertJumpToBlock(nil, elseBlk) 1515 1516 // Now start translating the instructions after br_if. 1517 builder.Seal(elseBlk) // Else of br_if has the current block as the only one successor. 
1518 builder.SetCurrentBlock(elseBlk) 1519 1520 case wasm.OpcodeBrTable: 1521 labels := state.tmpForBrTable 1522 labels = labels[:0] 1523 labelCount := c.readI32u() 1524 for i := 0; i < int(labelCount); i++ { 1525 labels = append(labels, c.readI32u()) 1526 } 1527 labels = append(labels, c.readI32u()) // default label. 1528 if state.unreachable { 1529 break 1530 } 1531 1532 index := state.pop() 1533 if labelCount == 0 { // If this br_table is empty, we can just emit the unconditional jump. 1534 targetBlk, argNum := state.brTargetArgNumFor(labels[0]) 1535 args := c.loweringState.nPeekDup(argNum) 1536 c.insertJumpToBlock(args, targetBlk) 1537 } else { 1538 c.lowerBrTable(labels, index) 1539 } 1540 state.unreachable = true 1541 1542 case wasm.OpcodeNop: 1543 case wasm.OpcodeReturn: 1544 if state.unreachable { 1545 break 1546 } 1547 if c.needListener { 1548 c.callListenerAfter() 1549 } 1550 1551 results := c.loweringState.nPeekDup(c.results()) 1552 instr := builder.AllocateInstruction() 1553 1554 instr.AsReturn(results) 1555 builder.InsertInstruction(instr) 1556 state.unreachable = true 1557 1558 case wasm.OpcodeUnreachable: 1559 if state.unreachable { 1560 break 1561 } 1562 exit := builder.AllocateInstruction() 1563 exit.AsExitWithCode(c.execCtxPtrValue, wazevoapi.ExitCodeUnreachable) 1564 builder.InsertInstruction(exit) 1565 state.unreachable = true 1566 1567 case wasm.OpcodeCallIndirect: 1568 typeIndex := c.readI32u() 1569 tableIndex := c.readI32u() 1570 if state.unreachable { 1571 break 1572 } 1573 c.lowerCallIndirect(typeIndex, tableIndex) 1574 1575 case wasm.OpcodeCall: 1576 fnIndex := c.readI32u() 1577 if state.unreachable { 1578 break 1579 } 1580 1581 // Before transfer the control to the callee, we have to store the current module's moduleContextPtr 1582 // into execContext.callerModuleContextPtr in case when the callee is a Go function. 1583 // 1584 // TODO: maybe this can be optimized out if this is in-module function calls. Investigate later. 
1585 c.storeCallerModuleContext() 1586 1587 var typIndex wasm.Index 1588 if fnIndex < c.m.ImportFunctionCount { 1589 var fi int 1590 for i := range c.m.ImportSection { 1591 imp := &c.m.ImportSection[i] 1592 if imp.Type == wasm.ExternTypeFunc { 1593 if fi == int(fnIndex) { 1594 typIndex = imp.DescFunc 1595 break 1596 } 1597 fi++ 1598 } 1599 } 1600 } else { 1601 typIndex = c.m.FunctionSection[fnIndex-c.m.ImportFunctionCount] 1602 } 1603 typ := &c.m.TypeSection[typIndex] 1604 1605 // TODO: reuse slice? 1606 argN := len(typ.Params) 1607 args := make([]ssa.Value, argN+2) 1608 args[0] = c.execCtxPtrValue 1609 state.nPopInto(argN, args[2:]) 1610 1611 sig := c.signatures[typ] 1612 call := builder.AllocateInstruction() 1613 if fnIndex >= c.m.ImportFunctionCount { 1614 args[1] = c.moduleCtxPtrValue // This case the callee module is itself. 1615 call.AsCall(FunctionIndexToFuncRef(fnIndex), sig, args) 1616 builder.InsertInstruction(call) 1617 } else { 1618 // This case we have to read the address of the imported function from the module context. 1619 moduleCtx := c.moduleCtxPtrValue 1620 loadFuncPtr, loadModuleCtxPtr := builder.AllocateInstruction(), builder.AllocateInstruction() 1621 funcPtrOffset, moduleCtxPtrOffset, _ := c.offset.ImportedFunctionOffset(fnIndex) 1622 loadFuncPtr.AsLoad(moduleCtx, funcPtrOffset.U32(), ssa.TypeI64) 1623 loadModuleCtxPtr.AsLoad(moduleCtx, moduleCtxPtrOffset.U32(), ssa.TypeI64) 1624 builder.InsertInstruction(loadFuncPtr) 1625 builder.InsertInstruction(loadModuleCtxPtr) 1626 1627 args[1] = loadModuleCtxPtr.Return() // This case the callee module is itself. 
1628 1629 call.AsCallIndirect(loadFuncPtr.Return(), sig, args) 1630 builder.InsertInstruction(call) 1631 } 1632 1633 first, rest := call.Returns() 1634 if first.Valid() { 1635 state.push(first) 1636 } 1637 for _, v := range rest { 1638 state.push(v) 1639 } 1640 1641 c.reloadAfterCall() 1642 1643 case wasm.OpcodeDrop: 1644 if state.unreachable { 1645 break 1646 } 1647 _ = state.pop() 1648 case wasm.OpcodeF64ConvertI32S, wasm.OpcodeF64ConvertI64S, wasm.OpcodeF64ConvertI32U, wasm.OpcodeF64ConvertI64U: 1649 if state.unreachable { 1650 break 1651 } 1652 result := builder.AllocateInstruction().AsFcvtFromInt( 1653 state.pop(), 1654 op == wasm.OpcodeF64ConvertI32S || op == wasm.OpcodeF64ConvertI64S, 1655 true, 1656 ).Insert(builder).Return() 1657 state.push(result) 1658 case wasm.OpcodeF32ConvertI32S, wasm.OpcodeF32ConvertI64S, wasm.OpcodeF32ConvertI32U, wasm.OpcodeF32ConvertI64U: 1659 if state.unreachable { 1660 break 1661 } 1662 result := builder.AllocateInstruction().AsFcvtFromInt( 1663 state.pop(), 1664 op == wasm.OpcodeF32ConvertI32S || op == wasm.OpcodeF32ConvertI64S, 1665 false, 1666 ).Insert(builder).Return() 1667 state.push(result) 1668 case wasm.OpcodeF32DemoteF64: 1669 if state.unreachable { 1670 break 1671 } 1672 cvt := builder.AllocateInstruction() 1673 cvt.AsFdemote(state.pop()) 1674 builder.InsertInstruction(cvt) 1675 state.push(cvt.Return()) 1676 case wasm.OpcodeF64PromoteF32: 1677 if state.unreachable { 1678 break 1679 } 1680 cvt := builder.AllocateInstruction() 1681 cvt.AsFpromote(state.pop()) 1682 builder.InsertInstruction(cvt) 1683 state.push(cvt.Return()) 1684 1685 case wasm.OpcodeVecPrefix: 1686 state.pc++ 1687 vecOp := c.wasmFunctionBody[state.pc] 1688 switch vecOp { 1689 case wasm.OpcodeVecV128Const: 1690 state.pc++ 1691 lo := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1692 state.pc += 8 1693 hi := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:]) 1694 state.pc += 7 1695 if state.unreachable { 1696 break 1697 } 1698 ret := 
builder.AllocateInstruction().AsVconst(lo, hi).Insert(builder).Return() 1699 state.push(ret) 1700 case wasm.OpcodeVecV128Load: 1701 _, offset := c.readMemArg() 1702 if state.unreachable { 1703 break 1704 } 1705 baseAddr := state.pop() 1706 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1707 load := builder.AllocateInstruction() 1708 load.AsLoad(addr, offset, ssa.TypeV128) 1709 builder.InsertInstruction(load) 1710 state.push(load.Return()) 1711 case wasm.OpcodeVecV128Load8Lane, wasm.OpcodeVecV128Load16Lane, wasm.OpcodeVecV128Load32Lane: 1712 _, offset := c.readMemArg() 1713 state.pc++ 1714 if state.unreachable { 1715 break 1716 } 1717 var lane ssa.VecLane 1718 var loadOp ssa.Opcode 1719 var opSize uint64 1720 switch vecOp { 1721 case wasm.OpcodeVecV128Load8Lane: 1722 loadOp, lane, opSize = ssa.OpcodeUload8, ssa.VecLaneI8x16, 1 1723 case wasm.OpcodeVecV128Load16Lane: 1724 loadOp, lane, opSize = ssa.OpcodeUload16, ssa.VecLaneI16x8, 2 1725 case wasm.OpcodeVecV128Load32Lane: 1726 loadOp, lane, opSize = ssa.OpcodeUload32, ssa.VecLaneI32x4, 4 1727 } 1728 laneIndex := c.wasmFunctionBody[state.pc] 1729 vector := state.pop() 1730 baseAddr := state.pop() 1731 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1732 load := builder.AllocateInstruction(). 1733 AsExtLoad(loadOp, addr, offset, false). 1734 Insert(builder).Return() 1735 ret := builder.AllocateInstruction(). 1736 AsInsertlane(vector, load, laneIndex, lane). 1737 Insert(builder).Return() 1738 state.push(ret) 1739 case wasm.OpcodeVecV128Load64Lane: 1740 _, offset := c.readMemArg() 1741 state.pc++ 1742 if state.unreachable { 1743 break 1744 } 1745 laneIndex := c.wasmFunctionBody[state.pc] 1746 vector := state.pop() 1747 baseAddr := state.pop() 1748 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1749 load := builder.AllocateInstruction(). 1750 AsLoad(addr, offset, ssa.TypeI64). 1751 Insert(builder).Return() 1752 ret := builder.AllocateInstruction(). 
1753 AsInsertlane(vector, load, laneIndex, ssa.VecLaneI64x2). 1754 Insert(builder).Return() 1755 state.push(ret) 1756 1757 case wasm.OpcodeVecV128Load32zero, wasm.OpcodeVecV128Load64zero: 1758 _, offset := c.readMemArg() 1759 if state.unreachable { 1760 break 1761 } 1762 1763 var scalarType ssa.Type 1764 switch vecOp { 1765 case wasm.OpcodeVecV128Load32zero: 1766 scalarType = ssa.TypeF32 1767 case wasm.OpcodeVecV128Load64zero: 1768 scalarType = ssa.TypeF64 1769 } 1770 1771 baseAddr := state.pop() 1772 addr := c.memOpSetup(baseAddr, uint64(offset), uint64(scalarType.Size())) 1773 1774 ret := builder.AllocateInstruction(). 1775 AsVZeroExtLoad(addr, offset, scalarType). 1776 Insert(builder).Return() 1777 state.push(ret) 1778 1779 case wasm.OpcodeVecV128Load8x8u, wasm.OpcodeVecV128Load8x8s, 1780 wasm.OpcodeVecV128Load16x4u, wasm.OpcodeVecV128Load16x4s, 1781 wasm.OpcodeVecV128Load32x2u, wasm.OpcodeVecV128Load32x2s: 1782 _, offset := c.readMemArg() 1783 if state.unreachable { 1784 break 1785 } 1786 var lane ssa.VecLane 1787 var signed bool 1788 switch vecOp { 1789 case wasm.OpcodeVecV128Load8x8s: 1790 signed = true 1791 fallthrough 1792 case wasm.OpcodeVecV128Load8x8u: 1793 lane = ssa.VecLaneI8x16 1794 case wasm.OpcodeVecV128Load16x4s: 1795 signed = true 1796 fallthrough 1797 case wasm.OpcodeVecV128Load16x4u: 1798 lane = ssa.VecLaneI16x8 1799 case wasm.OpcodeVecV128Load32x2s: 1800 signed = true 1801 fallthrough 1802 case wasm.OpcodeVecV128Load32x2u: 1803 lane = ssa.VecLaneI32x4 1804 } 1805 baseAddr := state.pop() 1806 addr := c.memOpSetup(baseAddr, uint64(offset), 8) 1807 load := builder.AllocateInstruction(). 1808 AsLoad(addr, offset, ssa.TypeV128). 1809 Insert(builder).Return() 1810 ret := builder.AllocateInstruction(). 1811 AsWiden(load, lane, signed, true). 
1812 Insert(builder).Return() 1813 state.push(ret) 1814 case wasm.OpcodeVecV128Load8Splat, wasm.OpcodeVecV128Load16Splat, 1815 wasm.OpcodeVecV128Load32Splat, wasm.OpcodeVecV128Load64Splat: 1816 _, offset := c.readMemArg() 1817 if state.unreachable { 1818 break 1819 } 1820 var lane ssa.VecLane 1821 var opSize uint64 1822 switch vecOp { 1823 case wasm.OpcodeVecV128Load8Splat: 1824 lane, opSize = ssa.VecLaneI8x16, 1 1825 case wasm.OpcodeVecV128Load16Splat: 1826 lane, opSize = ssa.VecLaneI16x8, 2 1827 case wasm.OpcodeVecV128Load32Splat: 1828 lane, opSize = ssa.VecLaneI32x4, 4 1829 case wasm.OpcodeVecV128Load64Splat: 1830 lane, opSize = ssa.VecLaneI64x2, 8 1831 } 1832 baseAddr := state.pop() 1833 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1834 ret := builder.AllocateInstruction(). 1835 AsLoadSplat(addr, offset, lane). 1836 Insert(builder).Return() 1837 state.push(ret) 1838 case wasm.OpcodeVecV128Store: 1839 _, offset := c.readMemArg() 1840 if state.unreachable { 1841 break 1842 } 1843 value := state.pop() 1844 baseAddr := state.pop() 1845 addr := c.memOpSetup(baseAddr, uint64(offset), 16) 1846 builder.AllocateInstruction(). 1847 AsStore(ssa.OpcodeStore, value, addr, offset). 
1848 Insert(builder) 1849 case wasm.OpcodeVecV128Store8Lane, wasm.OpcodeVecV128Store16Lane, 1850 wasm.OpcodeVecV128Store32Lane, wasm.OpcodeVecV128Store64Lane: 1851 _, offset := c.readMemArg() 1852 state.pc++ 1853 if state.unreachable { 1854 break 1855 } 1856 laneIndex := c.wasmFunctionBody[state.pc] 1857 var storeOp ssa.Opcode 1858 var lane ssa.VecLane 1859 var opSize uint64 1860 switch vecOp { 1861 case wasm.OpcodeVecV128Store8Lane: 1862 storeOp, lane, opSize = ssa.OpcodeIstore8, ssa.VecLaneI8x16, 1 1863 case wasm.OpcodeVecV128Store16Lane: 1864 storeOp, lane, opSize = ssa.OpcodeIstore16, ssa.VecLaneI16x8, 2 1865 case wasm.OpcodeVecV128Store32Lane: 1866 storeOp, lane, opSize = ssa.OpcodeIstore32, ssa.VecLaneI32x4, 4 1867 case wasm.OpcodeVecV128Store64Lane: 1868 storeOp, lane, opSize = ssa.OpcodeStore, ssa.VecLaneI64x2, 8 1869 } 1870 vector := state.pop() 1871 baseAddr := state.pop() 1872 addr := c.memOpSetup(baseAddr, uint64(offset), opSize) 1873 value := builder.AllocateInstruction(). 1874 AsExtractlane(vector, laneIndex, lane, false). 1875 Insert(builder).Return() 1876 builder.AllocateInstruction(). 1877 AsStore(storeOp, value, addr, offset). 
1878 Insert(builder) 1879 case wasm.OpcodeVecV128Not: 1880 if state.unreachable { 1881 break 1882 } 1883 v1 := state.pop() 1884 ret := builder.AllocateInstruction().AsVbnot(v1).Insert(builder).Return() 1885 state.push(ret) 1886 case wasm.OpcodeVecV128And: 1887 if state.unreachable { 1888 break 1889 } 1890 v2 := state.pop() 1891 v1 := state.pop() 1892 ret := builder.AllocateInstruction().AsVband(v1, v2).Insert(builder).Return() 1893 state.push(ret) 1894 case wasm.OpcodeVecV128AndNot: 1895 if state.unreachable { 1896 break 1897 } 1898 v2 := state.pop() 1899 v1 := state.pop() 1900 ret := builder.AllocateInstruction().AsVbandnot(v1, v2).Insert(builder).Return() 1901 state.push(ret) 1902 case wasm.OpcodeVecV128Or: 1903 if state.unreachable { 1904 break 1905 } 1906 v2 := state.pop() 1907 v1 := state.pop() 1908 ret := builder.AllocateInstruction().AsVbor(v1, v2).Insert(builder).Return() 1909 state.push(ret) 1910 case wasm.OpcodeVecV128Xor: 1911 if state.unreachable { 1912 break 1913 } 1914 v2 := state.pop() 1915 v1 := state.pop() 1916 ret := builder.AllocateInstruction().AsVbxor(v1, v2).Insert(builder).Return() 1917 state.push(ret) 1918 case wasm.OpcodeVecV128Bitselect: 1919 if state.unreachable { 1920 break 1921 } 1922 c := state.pop() 1923 v2 := state.pop() 1924 v1 := state.pop() 1925 ret := builder.AllocateInstruction().AsVbitselect(c, v1, v2).Insert(builder).Return() 1926 state.push(ret) 1927 case wasm.OpcodeVecV128AnyTrue: 1928 if state.unreachable { 1929 break 1930 } 1931 v1 := state.pop() 1932 ret := builder.AllocateInstruction().AsVanyTrue(v1).Insert(builder).Return() 1933 state.push(ret) 1934 case wasm.OpcodeVecI8x16AllTrue, wasm.OpcodeVecI16x8AllTrue, wasm.OpcodeVecI32x4AllTrue, wasm.OpcodeVecI64x2AllTrue: 1935 if state.unreachable { 1936 break 1937 } 1938 var lane ssa.VecLane 1939 switch vecOp { 1940 case wasm.OpcodeVecI8x16AllTrue: 1941 lane = ssa.VecLaneI8x16 1942 case wasm.OpcodeVecI16x8AllTrue: 1943 lane = ssa.VecLaneI16x8 1944 case 
wasm.OpcodeVecI32x4AllTrue: 1945 lane = ssa.VecLaneI32x4 1946 case wasm.OpcodeVecI64x2AllTrue: 1947 lane = ssa.VecLaneI64x2 1948 } 1949 v1 := state.pop() 1950 ret := builder.AllocateInstruction().AsVallTrue(v1, lane).Insert(builder).Return() 1951 state.push(ret) 1952 case wasm.OpcodeVecI8x16BitMask, wasm.OpcodeVecI16x8BitMask, wasm.OpcodeVecI32x4BitMask, wasm.OpcodeVecI64x2BitMask: 1953 if state.unreachable { 1954 break 1955 } 1956 var lane ssa.VecLane 1957 switch vecOp { 1958 case wasm.OpcodeVecI8x16BitMask: 1959 lane = ssa.VecLaneI8x16 1960 case wasm.OpcodeVecI16x8BitMask: 1961 lane = ssa.VecLaneI16x8 1962 case wasm.OpcodeVecI32x4BitMask: 1963 lane = ssa.VecLaneI32x4 1964 case wasm.OpcodeVecI64x2BitMask: 1965 lane = ssa.VecLaneI64x2 1966 } 1967 v1 := state.pop() 1968 ret := builder.AllocateInstruction().AsVhighBits(v1, lane).Insert(builder).Return() 1969 state.push(ret) 1970 case wasm.OpcodeVecI8x16Abs, wasm.OpcodeVecI16x8Abs, wasm.OpcodeVecI32x4Abs, wasm.OpcodeVecI64x2Abs: 1971 if state.unreachable { 1972 break 1973 } 1974 var lane ssa.VecLane 1975 switch vecOp { 1976 case wasm.OpcodeVecI8x16Abs: 1977 lane = ssa.VecLaneI8x16 1978 case wasm.OpcodeVecI16x8Abs: 1979 lane = ssa.VecLaneI16x8 1980 case wasm.OpcodeVecI32x4Abs: 1981 lane = ssa.VecLaneI32x4 1982 case wasm.OpcodeVecI64x2Abs: 1983 lane = ssa.VecLaneI64x2 1984 } 1985 v1 := state.pop() 1986 ret := builder.AllocateInstruction().AsVIabs(v1, lane).Insert(builder).Return() 1987 state.push(ret) 1988 case wasm.OpcodeVecI8x16Neg, wasm.OpcodeVecI16x8Neg, wasm.OpcodeVecI32x4Neg, wasm.OpcodeVecI64x2Neg: 1989 if state.unreachable { 1990 break 1991 } 1992 var lane ssa.VecLane 1993 switch vecOp { 1994 case wasm.OpcodeVecI8x16Neg: 1995 lane = ssa.VecLaneI8x16 1996 case wasm.OpcodeVecI16x8Neg: 1997 lane = ssa.VecLaneI16x8 1998 case wasm.OpcodeVecI32x4Neg: 1999 lane = ssa.VecLaneI32x4 2000 case wasm.OpcodeVecI64x2Neg: 2001 lane = ssa.VecLaneI64x2 2002 } 2003 v1 := state.pop() 2004 ret := 
builder.AllocateInstruction().AsVIneg(v1, lane).Insert(builder).Return() 2005 state.push(ret) 2006 case wasm.OpcodeVecI8x16Popcnt: 2007 if state.unreachable { 2008 break 2009 } 2010 lane := ssa.VecLaneI8x16 2011 v1 := state.pop() 2012 2013 ret := builder.AllocateInstruction().AsVIpopcnt(v1, lane).Insert(builder).Return() 2014 state.push(ret) 2015 case wasm.OpcodeVecI8x16Add, wasm.OpcodeVecI16x8Add, wasm.OpcodeVecI32x4Add, wasm.OpcodeVecI64x2Add: 2016 if state.unreachable { 2017 break 2018 } 2019 var lane ssa.VecLane 2020 switch vecOp { 2021 case wasm.OpcodeVecI8x16Add: 2022 lane = ssa.VecLaneI8x16 2023 case wasm.OpcodeVecI16x8Add: 2024 lane = ssa.VecLaneI16x8 2025 case wasm.OpcodeVecI32x4Add: 2026 lane = ssa.VecLaneI32x4 2027 case wasm.OpcodeVecI64x2Add: 2028 lane = ssa.VecLaneI64x2 2029 } 2030 v2 := state.pop() 2031 v1 := state.pop() 2032 ret := builder.AllocateInstruction().AsVIadd(v1, v2, lane).Insert(builder).Return() 2033 state.push(ret) 2034 case wasm.OpcodeVecI8x16AddSatS, wasm.OpcodeVecI16x8AddSatS: 2035 if state.unreachable { 2036 break 2037 } 2038 var lane ssa.VecLane 2039 switch vecOp { 2040 case wasm.OpcodeVecI8x16AddSatS: 2041 lane = ssa.VecLaneI8x16 2042 case wasm.OpcodeVecI16x8AddSatS: 2043 lane = ssa.VecLaneI16x8 2044 } 2045 v2 := state.pop() 2046 v1 := state.pop() 2047 ret := builder.AllocateInstruction().AsVSaddSat(v1, v2, lane).Insert(builder).Return() 2048 state.push(ret) 2049 case wasm.OpcodeVecI8x16AddSatU, wasm.OpcodeVecI16x8AddSatU: 2050 if state.unreachable { 2051 break 2052 } 2053 var lane ssa.VecLane 2054 switch vecOp { 2055 case wasm.OpcodeVecI8x16AddSatU: 2056 lane = ssa.VecLaneI8x16 2057 case wasm.OpcodeVecI16x8AddSatU: 2058 lane = ssa.VecLaneI16x8 2059 } 2060 v2 := state.pop() 2061 v1 := state.pop() 2062 ret := builder.AllocateInstruction().AsVUaddSat(v1, v2, lane).Insert(builder).Return() 2063 state.push(ret) 2064 case wasm.OpcodeVecI8x16SubSatS, wasm.OpcodeVecI16x8SubSatS: 2065 if state.unreachable { 2066 break 2067 } 2068 var lane 
ssa.VecLane 2069 switch vecOp { 2070 case wasm.OpcodeVecI8x16SubSatS: 2071 lane = ssa.VecLaneI8x16 2072 case wasm.OpcodeVecI16x8SubSatS: 2073 lane = ssa.VecLaneI16x8 2074 } 2075 v2 := state.pop() 2076 v1 := state.pop() 2077 ret := builder.AllocateInstruction().AsVSsubSat(v1, v2, lane).Insert(builder).Return() 2078 state.push(ret) 2079 case wasm.OpcodeVecI8x16SubSatU, wasm.OpcodeVecI16x8SubSatU: 2080 if state.unreachable { 2081 break 2082 } 2083 var lane ssa.VecLane 2084 switch vecOp { 2085 case wasm.OpcodeVecI8x16SubSatU: 2086 lane = ssa.VecLaneI8x16 2087 case wasm.OpcodeVecI16x8SubSatU: 2088 lane = ssa.VecLaneI16x8 2089 } 2090 v2 := state.pop() 2091 v1 := state.pop() 2092 ret := builder.AllocateInstruction().AsVUsubSat(v1, v2, lane).Insert(builder).Return() 2093 state.push(ret) 2094 2095 case wasm.OpcodeVecI8x16Sub, wasm.OpcodeVecI16x8Sub, wasm.OpcodeVecI32x4Sub, wasm.OpcodeVecI64x2Sub: 2096 if state.unreachable { 2097 break 2098 } 2099 var lane ssa.VecLane 2100 switch vecOp { 2101 case wasm.OpcodeVecI8x16Sub: 2102 lane = ssa.VecLaneI8x16 2103 case wasm.OpcodeVecI16x8Sub: 2104 lane = ssa.VecLaneI16x8 2105 case wasm.OpcodeVecI32x4Sub: 2106 lane = ssa.VecLaneI32x4 2107 case wasm.OpcodeVecI64x2Sub: 2108 lane = ssa.VecLaneI64x2 2109 } 2110 v2 := state.pop() 2111 v1 := state.pop() 2112 ret := builder.AllocateInstruction().AsVIsub(v1, v2, lane).Insert(builder).Return() 2113 state.push(ret) 2114 case wasm.OpcodeVecI8x16MinS, wasm.OpcodeVecI16x8MinS, wasm.OpcodeVecI32x4MinS: 2115 if state.unreachable { 2116 break 2117 } 2118 var lane ssa.VecLane 2119 switch vecOp { 2120 case wasm.OpcodeVecI8x16MinS: 2121 lane = ssa.VecLaneI8x16 2122 case wasm.OpcodeVecI16x8MinS: 2123 lane = ssa.VecLaneI16x8 2124 case wasm.OpcodeVecI32x4MinS: 2125 lane = ssa.VecLaneI32x4 2126 } 2127 v2 := state.pop() 2128 v1 := state.pop() 2129 ret := builder.AllocateInstruction().AsVImin(v1, v2, lane).Insert(builder).Return() 2130 state.push(ret) 2131 case wasm.OpcodeVecI8x16MinU, wasm.OpcodeVecI16x8MinU, 
wasm.OpcodeVecI32x4MinU: 2132 if state.unreachable { 2133 break 2134 } 2135 var lane ssa.VecLane 2136 switch vecOp { 2137 case wasm.OpcodeVecI8x16MinU: 2138 lane = ssa.VecLaneI8x16 2139 case wasm.OpcodeVecI16x8MinU: 2140 lane = ssa.VecLaneI16x8 2141 case wasm.OpcodeVecI32x4MinU: 2142 lane = ssa.VecLaneI32x4 2143 } 2144 v2 := state.pop() 2145 v1 := state.pop() 2146 ret := builder.AllocateInstruction().AsVUmin(v1, v2, lane).Insert(builder).Return() 2147 state.push(ret) 2148 case wasm.OpcodeVecI8x16MaxS, wasm.OpcodeVecI16x8MaxS, wasm.OpcodeVecI32x4MaxS: 2149 if state.unreachable { 2150 break 2151 } 2152 var lane ssa.VecLane 2153 switch vecOp { 2154 case wasm.OpcodeVecI8x16MaxS: 2155 lane = ssa.VecLaneI8x16 2156 case wasm.OpcodeVecI16x8MaxS: 2157 lane = ssa.VecLaneI16x8 2158 case wasm.OpcodeVecI32x4MaxS: 2159 lane = ssa.VecLaneI32x4 2160 } 2161 v2 := state.pop() 2162 v1 := state.pop() 2163 ret := builder.AllocateInstruction().AsVImax(v1, v2, lane).Insert(builder).Return() 2164 state.push(ret) 2165 case wasm.OpcodeVecI8x16MaxU, wasm.OpcodeVecI16x8MaxU, wasm.OpcodeVecI32x4MaxU: 2166 if state.unreachable { 2167 break 2168 } 2169 var lane ssa.VecLane 2170 switch vecOp { 2171 case wasm.OpcodeVecI8x16MaxU: 2172 lane = ssa.VecLaneI8x16 2173 case wasm.OpcodeVecI16x8MaxU: 2174 lane = ssa.VecLaneI16x8 2175 case wasm.OpcodeVecI32x4MaxU: 2176 lane = ssa.VecLaneI32x4 2177 } 2178 v2 := state.pop() 2179 v1 := state.pop() 2180 ret := builder.AllocateInstruction().AsVUmax(v1, v2, lane).Insert(builder).Return() 2181 state.push(ret) 2182 case wasm.OpcodeVecI8x16AvgrU, wasm.OpcodeVecI16x8AvgrU: 2183 if state.unreachable { 2184 break 2185 } 2186 var lane ssa.VecLane 2187 switch vecOp { 2188 case wasm.OpcodeVecI8x16AvgrU: 2189 lane = ssa.VecLaneI8x16 2190 case wasm.OpcodeVecI16x8AvgrU: 2191 lane = ssa.VecLaneI16x8 2192 } 2193 v2 := state.pop() 2194 v1 := state.pop() 2195 ret := builder.AllocateInstruction().AsVAvgRound(v1, v2, lane).Insert(builder).Return() 2196 state.push(ret) 2197 case 
wasm.OpcodeVecI16x8Mul, wasm.OpcodeVecI32x4Mul, wasm.OpcodeVecI64x2Mul: 2198 if state.unreachable { 2199 break 2200 } 2201 var lane ssa.VecLane 2202 switch vecOp { 2203 case wasm.OpcodeVecI16x8Mul: 2204 lane = ssa.VecLaneI16x8 2205 case wasm.OpcodeVecI32x4Mul: 2206 lane = ssa.VecLaneI32x4 2207 case wasm.OpcodeVecI64x2Mul: 2208 lane = ssa.VecLaneI64x2 2209 } 2210 v2 := state.pop() 2211 v1 := state.pop() 2212 ret := builder.AllocateInstruction().AsVImul(v1, v2, lane).Insert(builder).Return() 2213 state.push(ret) 2214 case wasm.OpcodeVecI16x8Q15mulrSatS: 2215 if state.unreachable { 2216 break 2217 } 2218 v2 := state.pop() 2219 v1 := state.pop() 2220 ret := builder.AllocateInstruction().AsSqmulRoundSat(v1, v2, ssa.VecLaneI16x8).Insert(builder).Return() 2221 state.push(ret) 2222 case wasm.OpcodeVecI8x16Eq, wasm.OpcodeVecI16x8Eq, wasm.OpcodeVecI32x4Eq, wasm.OpcodeVecI64x2Eq: 2223 if state.unreachable { 2224 break 2225 } 2226 var lane ssa.VecLane 2227 switch vecOp { 2228 case wasm.OpcodeVecI8x16Eq: 2229 lane = ssa.VecLaneI8x16 2230 case wasm.OpcodeVecI16x8Eq: 2231 lane = ssa.VecLaneI16x8 2232 case wasm.OpcodeVecI32x4Eq: 2233 lane = ssa.VecLaneI32x4 2234 case wasm.OpcodeVecI64x2Eq: 2235 lane = ssa.VecLaneI64x2 2236 } 2237 v2 := state.pop() 2238 v1 := state.pop() 2239 ret := builder.AllocateInstruction(). 2240 AsVIcmp(v1, v2, ssa.IntegerCmpCondEqual, lane).Insert(builder).Return() 2241 state.push(ret) 2242 case wasm.OpcodeVecI8x16Ne, wasm.OpcodeVecI16x8Ne, wasm.OpcodeVecI32x4Ne, wasm.OpcodeVecI64x2Ne: 2243 if state.unreachable { 2244 break 2245 } 2246 var lane ssa.VecLane 2247 switch vecOp { 2248 case wasm.OpcodeVecI8x16Ne: 2249 lane = ssa.VecLaneI8x16 2250 case wasm.OpcodeVecI16x8Ne: 2251 lane = ssa.VecLaneI16x8 2252 case wasm.OpcodeVecI32x4Ne: 2253 lane = ssa.VecLaneI32x4 2254 case wasm.OpcodeVecI64x2Ne: 2255 lane = ssa.VecLaneI64x2 2256 } 2257 v2 := state.pop() 2258 v1 := state.pop() 2259 ret := builder.AllocateInstruction(). 
2260 AsVIcmp(v1, v2, ssa.IntegerCmpCondNotEqual, lane).Insert(builder).Return() 2261 state.push(ret) 2262 case wasm.OpcodeVecI8x16LtS, wasm.OpcodeVecI16x8LtS, wasm.OpcodeVecI32x4LtS, wasm.OpcodeVecI64x2LtS: 2263 if state.unreachable { 2264 break 2265 } 2266 var lane ssa.VecLane 2267 switch vecOp { 2268 case wasm.OpcodeVecI8x16LtS: 2269 lane = ssa.VecLaneI8x16 2270 case wasm.OpcodeVecI16x8LtS: 2271 lane = ssa.VecLaneI16x8 2272 case wasm.OpcodeVecI32x4LtS: 2273 lane = ssa.VecLaneI32x4 2274 case wasm.OpcodeVecI64x2LtS: 2275 lane = ssa.VecLaneI64x2 2276 } 2277 v2 := state.pop() 2278 v1 := state.pop() 2279 ret := builder.AllocateInstruction(). 2280 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThan, lane).Insert(builder).Return() 2281 state.push(ret) 2282 case wasm.OpcodeVecI8x16LtU, wasm.OpcodeVecI16x8LtU, wasm.OpcodeVecI32x4LtU: 2283 if state.unreachable { 2284 break 2285 } 2286 var lane ssa.VecLane 2287 switch vecOp { 2288 case wasm.OpcodeVecI8x16LtU: 2289 lane = ssa.VecLaneI8x16 2290 case wasm.OpcodeVecI16x8LtU: 2291 lane = ssa.VecLaneI16x8 2292 case wasm.OpcodeVecI32x4LtU: 2293 lane = ssa.VecLaneI32x4 2294 } 2295 v2 := state.pop() 2296 v1 := state.pop() 2297 ret := builder.AllocateInstruction(). 2298 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThan, lane).Insert(builder).Return() 2299 state.push(ret) 2300 case wasm.OpcodeVecI8x16LeS, wasm.OpcodeVecI16x8LeS, wasm.OpcodeVecI32x4LeS, wasm.OpcodeVecI64x2LeS: 2301 if state.unreachable { 2302 break 2303 } 2304 var lane ssa.VecLane 2305 switch vecOp { 2306 case wasm.OpcodeVecI8x16LeS: 2307 lane = ssa.VecLaneI8x16 2308 case wasm.OpcodeVecI16x8LeS: 2309 lane = ssa.VecLaneI16x8 2310 case wasm.OpcodeVecI32x4LeS: 2311 lane = ssa.VecLaneI32x4 2312 case wasm.OpcodeVecI64x2LeS: 2313 lane = ssa.VecLaneI64x2 2314 } 2315 v2 := state.pop() 2316 v1 := state.pop() 2317 ret := builder.AllocateInstruction(). 
2318 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThanOrEqual, lane).Insert(builder).Return() 2319 state.push(ret) 2320 case wasm.OpcodeVecI8x16LeU, wasm.OpcodeVecI16x8LeU, wasm.OpcodeVecI32x4LeU: 2321 if state.unreachable { 2322 break 2323 } 2324 var lane ssa.VecLane 2325 switch vecOp { 2326 case wasm.OpcodeVecI8x16LeU: 2327 lane = ssa.VecLaneI8x16 2328 case wasm.OpcodeVecI16x8LeU: 2329 lane = ssa.VecLaneI16x8 2330 case wasm.OpcodeVecI32x4LeU: 2331 lane = ssa.VecLaneI32x4 2332 } 2333 v2 := state.pop() 2334 v1 := state.pop() 2335 ret := builder.AllocateInstruction(). 2336 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThanOrEqual, lane).Insert(builder).Return() 2337 state.push(ret) 2338 case wasm.OpcodeVecI8x16GtS, wasm.OpcodeVecI16x8GtS, wasm.OpcodeVecI32x4GtS, wasm.OpcodeVecI64x2GtS: 2339 if state.unreachable { 2340 break 2341 } 2342 var lane ssa.VecLane 2343 switch vecOp { 2344 case wasm.OpcodeVecI8x16GtS: 2345 lane = ssa.VecLaneI8x16 2346 case wasm.OpcodeVecI16x8GtS: 2347 lane = ssa.VecLaneI16x8 2348 case wasm.OpcodeVecI32x4GtS: 2349 lane = ssa.VecLaneI32x4 2350 case wasm.OpcodeVecI64x2GtS: 2351 lane = ssa.VecLaneI64x2 2352 } 2353 v2 := state.pop() 2354 v1 := state.pop() 2355 ret := builder.AllocateInstruction(). 2356 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThan, lane).Insert(builder).Return() 2357 state.push(ret) 2358 case wasm.OpcodeVecI8x16GtU, wasm.OpcodeVecI16x8GtU, wasm.OpcodeVecI32x4GtU: 2359 if state.unreachable { 2360 break 2361 } 2362 var lane ssa.VecLane 2363 switch vecOp { 2364 case wasm.OpcodeVecI8x16GtU: 2365 lane = ssa.VecLaneI8x16 2366 case wasm.OpcodeVecI16x8GtU: 2367 lane = ssa.VecLaneI16x8 2368 case wasm.OpcodeVecI32x4GtU: 2369 lane = ssa.VecLaneI32x4 2370 } 2371 v2 := state.pop() 2372 v1 := state.pop() 2373 ret := builder.AllocateInstruction(). 
2374 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThan, lane).Insert(builder).Return() 2375 state.push(ret) 2376 case wasm.OpcodeVecI8x16GeS, wasm.OpcodeVecI16x8GeS, wasm.OpcodeVecI32x4GeS, wasm.OpcodeVecI64x2GeS: 2377 if state.unreachable { 2378 break 2379 } 2380 var lane ssa.VecLane 2381 switch vecOp { 2382 case wasm.OpcodeVecI8x16GeS: 2383 lane = ssa.VecLaneI8x16 2384 case wasm.OpcodeVecI16x8GeS: 2385 lane = ssa.VecLaneI16x8 2386 case wasm.OpcodeVecI32x4GeS: 2387 lane = ssa.VecLaneI32x4 2388 case wasm.OpcodeVecI64x2GeS: 2389 lane = ssa.VecLaneI64x2 2390 } 2391 v2 := state.pop() 2392 v1 := state.pop() 2393 ret := builder.AllocateInstruction(). 2394 AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThanOrEqual, lane).Insert(builder).Return() 2395 state.push(ret) 2396 case wasm.OpcodeVecI8x16GeU, wasm.OpcodeVecI16x8GeU, wasm.OpcodeVecI32x4GeU: 2397 if state.unreachable { 2398 break 2399 } 2400 var lane ssa.VecLane 2401 switch vecOp { 2402 case wasm.OpcodeVecI8x16GeU: 2403 lane = ssa.VecLaneI8x16 2404 case wasm.OpcodeVecI16x8GeU: 2405 lane = ssa.VecLaneI16x8 2406 case wasm.OpcodeVecI32x4GeU: 2407 lane = ssa.VecLaneI32x4 2408 } 2409 v2 := state.pop() 2410 v1 := state.pop() 2411 ret := builder.AllocateInstruction(). 
2412 AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual, lane).Insert(builder).Return() 2413 state.push(ret) 2414 case wasm.OpcodeVecF32x4Max, wasm.OpcodeVecF64x2Max: 2415 if state.unreachable { 2416 break 2417 } 2418 var lane ssa.VecLane 2419 switch vecOp { 2420 case wasm.OpcodeVecF32x4Max: 2421 lane = ssa.VecLaneF32x4 2422 case wasm.OpcodeVecF64x2Max: 2423 lane = ssa.VecLaneF64x2 2424 } 2425 v2 := state.pop() 2426 v1 := state.pop() 2427 ret := builder.AllocateInstruction().AsVFmax(v1, v2, lane).Insert(builder).Return() 2428 state.push(ret) 2429 case wasm.OpcodeVecF32x4Abs, wasm.OpcodeVecF64x2Abs: 2430 if state.unreachable { 2431 break 2432 } 2433 var lane ssa.VecLane 2434 switch vecOp { 2435 case wasm.OpcodeVecF32x4Abs: 2436 lane = ssa.VecLaneF32x4 2437 case wasm.OpcodeVecF64x2Abs: 2438 lane = ssa.VecLaneF64x2 2439 } 2440 v1 := state.pop() 2441 ret := builder.AllocateInstruction().AsVFabs(v1, lane).Insert(builder).Return() 2442 state.push(ret) 2443 case wasm.OpcodeVecF32x4Min, wasm.OpcodeVecF64x2Min: 2444 if state.unreachable { 2445 break 2446 } 2447 var lane ssa.VecLane 2448 switch vecOp { 2449 case wasm.OpcodeVecF32x4Min: 2450 lane = ssa.VecLaneF32x4 2451 case wasm.OpcodeVecF64x2Min: 2452 lane = ssa.VecLaneF64x2 2453 } 2454 v2 := state.pop() 2455 v1 := state.pop() 2456 ret := builder.AllocateInstruction().AsVFmin(v1, v2, lane).Insert(builder).Return() 2457 state.push(ret) 2458 case wasm.OpcodeVecF32x4Neg, wasm.OpcodeVecF64x2Neg: 2459 if state.unreachable { 2460 break 2461 } 2462 var lane ssa.VecLane 2463 switch vecOp { 2464 case wasm.OpcodeVecF32x4Neg: 2465 lane = ssa.VecLaneF32x4 2466 case wasm.OpcodeVecF64x2Neg: 2467 lane = ssa.VecLaneF64x2 2468 } 2469 v1 := state.pop() 2470 ret := builder.AllocateInstruction().AsVFneg(v1, lane).Insert(builder).Return() 2471 state.push(ret) 2472 case wasm.OpcodeVecF32x4Sqrt, wasm.OpcodeVecF64x2Sqrt: 2473 if state.unreachable { 2474 break 2475 } 2476 var lane ssa.VecLane 2477 switch vecOp { 2478 case 
wasm.OpcodeVecF32x4Sqrt: 2479 lane = ssa.VecLaneF32x4 2480 case wasm.OpcodeVecF64x2Sqrt: 2481 lane = ssa.VecLaneF64x2 2482 } 2483 v1 := state.pop() 2484 ret := builder.AllocateInstruction().AsVSqrt(v1, lane).Insert(builder).Return() 2485 state.push(ret) 2486 2487 case wasm.OpcodeVecF32x4Add, wasm.OpcodeVecF64x2Add: 2488 if state.unreachable { 2489 break 2490 } 2491 var lane ssa.VecLane 2492 switch vecOp { 2493 case wasm.OpcodeVecF32x4Add: 2494 lane = ssa.VecLaneF32x4 2495 case wasm.OpcodeVecF64x2Add: 2496 lane = ssa.VecLaneF64x2 2497 } 2498 v2 := state.pop() 2499 v1 := state.pop() 2500 ret := builder.AllocateInstruction().AsVFadd(v1, v2, lane).Insert(builder).Return() 2501 state.push(ret) 2502 case wasm.OpcodeVecF32x4Sub, wasm.OpcodeVecF64x2Sub: 2503 if state.unreachable { 2504 break 2505 } 2506 var lane ssa.VecLane 2507 switch vecOp { 2508 case wasm.OpcodeVecF32x4Sub: 2509 lane = ssa.VecLaneF32x4 2510 case wasm.OpcodeVecF64x2Sub: 2511 lane = ssa.VecLaneF64x2 2512 } 2513 v2 := state.pop() 2514 v1 := state.pop() 2515 ret := builder.AllocateInstruction().AsVFsub(v1, v2, lane).Insert(builder).Return() 2516 state.push(ret) 2517 case wasm.OpcodeVecF32x4Mul, wasm.OpcodeVecF64x2Mul: 2518 if state.unreachable { 2519 break 2520 } 2521 var lane ssa.VecLane 2522 switch vecOp { 2523 case wasm.OpcodeVecF32x4Mul: 2524 lane = ssa.VecLaneF32x4 2525 case wasm.OpcodeVecF64x2Mul: 2526 lane = ssa.VecLaneF64x2 2527 } 2528 v2 := state.pop() 2529 v1 := state.pop() 2530 ret := builder.AllocateInstruction().AsVFmul(v1, v2, lane).Insert(builder).Return() 2531 state.push(ret) 2532 case wasm.OpcodeVecF32x4Div, wasm.OpcodeVecF64x2Div: 2533 if state.unreachable { 2534 break 2535 } 2536 var lane ssa.VecLane 2537 switch vecOp { 2538 case wasm.OpcodeVecF32x4Div: 2539 lane = ssa.VecLaneF32x4 2540 case wasm.OpcodeVecF64x2Div: 2541 lane = ssa.VecLaneF64x2 2542 } 2543 v2 := state.pop() 2544 v1 := state.pop() 2545 ret := builder.AllocateInstruction().AsVFdiv(v1, v2, lane).Insert(builder).Return() 2546 
state.push(ret) 2547 2548 case wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S, wasm.OpcodeVecI16x8ExtaddPairwiseI8x16U: 2549 if state.unreachable { 2550 break 2551 } 2552 v1 := state.pop() 2553 // TODO: The sequence `Widen; Widen; IaddPairwise` can be substituted for a single instruction on some ISAs. 2554 signed := vecOp == wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S 2555 vlo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI8x16, signed, true).Insert(builder).Return() 2556 vhi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI8x16, signed, false).Insert(builder).Return() 2557 ret := builder.AllocateInstruction().AsIaddPairwise(vlo, vhi, ssa.VecLaneI16x8).Insert(builder).Return() 2558 state.push(ret) 2559 2560 case wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S, wasm.OpcodeVecI32x4ExtaddPairwiseI16x8U: 2561 if state.unreachable { 2562 break 2563 } 2564 v1 := state.pop() 2565 // TODO: The sequence `Widen; Widen; IaddPairwise` can be substituted for a single instruction on some ISAs. 2566 signed := vecOp == wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S 2567 vlo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, signed, true).Insert(builder).Return() 2568 vhi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, signed, false).Insert(builder).Return() 2569 ret := builder.AllocateInstruction().AsIaddPairwise(vlo, vhi, ssa.VecLaneI32x4).Insert(builder).Return() 2570 state.push(ret) 2571 2572 case wasm.OpcodeVecI16x8ExtMulLowI8x16S, wasm.OpcodeVecI16x8ExtMulLowI8x16U: 2573 if state.unreachable { 2574 break 2575 } 2576 v2 := state.pop() 2577 v1 := state.pop() 2578 ret := c.lowerExtMul( 2579 v1, v2, 2580 ssa.VecLaneI8x16, ssa.VecLaneI16x8, 2581 vecOp == wasm.OpcodeVecI16x8ExtMulLowI8x16S, true) 2582 state.push(ret) 2583 2584 case wasm.OpcodeVecI16x8ExtMulHighI8x16S, wasm.OpcodeVecI16x8ExtMulHighI8x16U: 2585 if state.unreachable { 2586 break 2587 } 2588 v2 := state.pop() 2589 v1 := state.pop() 2590 ret := c.lowerExtMul( 2591 v1, v2, 2592 ssa.VecLaneI8x16, 
ssa.VecLaneI16x8, 2593 vecOp == wasm.OpcodeVecI16x8ExtMulHighI8x16S, false) 2594 state.push(ret) 2595 2596 case wasm.OpcodeVecI32x4ExtMulLowI16x8S, wasm.OpcodeVecI32x4ExtMulLowI16x8U: 2597 if state.unreachable { 2598 break 2599 } 2600 v2 := state.pop() 2601 v1 := state.pop() 2602 ret := c.lowerExtMul( 2603 v1, v2, 2604 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2605 vecOp == wasm.OpcodeVecI32x4ExtMulLowI16x8S, true) 2606 state.push(ret) 2607 2608 case wasm.OpcodeVecI32x4ExtMulHighI16x8S, wasm.OpcodeVecI32x4ExtMulHighI16x8U: 2609 if state.unreachable { 2610 break 2611 } 2612 v2 := state.pop() 2613 v1 := state.pop() 2614 ret := c.lowerExtMul( 2615 v1, v2, 2616 ssa.VecLaneI16x8, ssa.VecLaneI32x4, 2617 vecOp == wasm.OpcodeVecI32x4ExtMulHighI16x8S, false) 2618 state.push(ret) 2619 case wasm.OpcodeVecI64x2ExtMulLowI32x4S, wasm.OpcodeVecI64x2ExtMulLowI32x4U: 2620 if state.unreachable { 2621 break 2622 } 2623 v2 := state.pop() 2624 v1 := state.pop() 2625 ret := c.lowerExtMul( 2626 v1, v2, 2627 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2628 vecOp == wasm.OpcodeVecI64x2ExtMulLowI32x4S, true) 2629 state.push(ret) 2630 2631 case wasm.OpcodeVecI64x2ExtMulHighI32x4S, wasm.OpcodeVecI64x2ExtMulHighI32x4U: 2632 if state.unreachable { 2633 break 2634 } 2635 v2 := state.pop() 2636 v1 := state.pop() 2637 ret := c.lowerExtMul( 2638 v1, v2, 2639 ssa.VecLaneI32x4, ssa.VecLaneI64x2, 2640 vecOp == wasm.OpcodeVecI64x2ExtMulHighI32x4S, false) 2641 state.push(ret) 2642 2643 case wasm.OpcodeVecI32x4DotI16x8S: 2644 if state.unreachable { 2645 break 2646 } 2647 v2 := state.pop() 2648 v1 := state.pop() 2649 2650 // TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs. 
2651 v1lo := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, true, true).Insert(builder).Return() 2652 v2lo := builder.AllocateInstruction().AsWiden(v2, ssa.VecLaneI16x8, true, true).Insert(builder).Return() 2653 low := builder.AllocateInstruction().AsVImul(v1lo, v2lo, ssa.VecLaneI32x4).Insert(builder).Return() 2654 2655 v1hi := builder.AllocateInstruction().AsWiden(v1, ssa.VecLaneI16x8, true, false).Insert(builder).Return() 2656 v2hi := builder.AllocateInstruction().AsWiden(v2, ssa.VecLaneI16x8, true, false).Insert(builder).Return() 2657 high := builder.AllocateInstruction().AsVImul(v1hi, v2hi, ssa.VecLaneI32x4).Insert(builder).Return() 2658 2659 ret := builder.AllocateInstruction().AsIaddPairwise(low, high, ssa.VecLaneI32x4).Insert(builder).Return() 2660 state.push(ret) 2661 2662 case wasm.OpcodeVecF32x4Eq, wasm.OpcodeVecF64x2Eq: 2663 if state.unreachable { 2664 break 2665 } 2666 var lane ssa.VecLane 2667 switch vecOp { 2668 case wasm.OpcodeVecF32x4Eq: 2669 lane = ssa.VecLaneF32x4 2670 case wasm.OpcodeVecF64x2Eq: 2671 lane = ssa.VecLaneF64x2 2672 } 2673 v2 := state.pop() 2674 v1 := state.pop() 2675 ret := builder.AllocateInstruction(). 2676 AsVFcmp(v1, v2, ssa.FloatCmpCondEqual, lane).Insert(builder).Return() 2677 state.push(ret) 2678 case wasm.OpcodeVecF32x4Ne, wasm.OpcodeVecF64x2Ne: 2679 if state.unreachable { 2680 break 2681 } 2682 var lane ssa.VecLane 2683 switch vecOp { 2684 case wasm.OpcodeVecF32x4Ne: 2685 lane = ssa.VecLaneF32x4 2686 case wasm.OpcodeVecF64x2Ne: 2687 lane = ssa.VecLaneF64x2 2688 } 2689 v2 := state.pop() 2690 v1 := state.pop() 2691 ret := builder.AllocateInstruction(). 
2692 AsVFcmp(v1, v2, ssa.FloatCmpCondNotEqual, lane).Insert(builder).Return() 2693 state.push(ret) 2694 case wasm.OpcodeVecF32x4Lt, wasm.OpcodeVecF64x2Lt: 2695 if state.unreachable { 2696 break 2697 } 2698 var lane ssa.VecLane 2699 switch vecOp { 2700 case wasm.OpcodeVecF32x4Lt: 2701 lane = ssa.VecLaneF32x4 2702 case wasm.OpcodeVecF64x2Lt: 2703 lane = ssa.VecLaneF64x2 2704 } 2705 v2 := state.pop() 2706 v1 := state.pop() 2707 ret := builder.AllocateInstruction(). 2708 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThan, lane).Insert(builder).Return() 2709 state.push(ret) 2710 case wasm.OpcodeVecF32x4Le, wasm.OpcodeVecF64x2Le: 2711 if state.unreachable { 2712 break 2713 } 2714 var lane ssa.VecLane 2715 switch vecOp { 2716 case wasm.OpcodeVecF32x4Le: 2717 lane = ssa.VecLaneF32x4 2718 case wasm.OpcodeVecF64x2Le: 2719 lane = ssa.VecLaneF64x2 2720 } 2721 v2 := state.pop() 2722 v1 := state.pop() 2723 ret := builder.AllocateInstruction(). 2724 AsVFcmp(v1, v2, ssa.FloatCmpCondLessThanOrEqual, lane).Insert(builder).Return() 2725 state.push(ret) 2726 case wasm.OpcodeVecF32x4Gt, wasm.OpcodeVecF64x2Gt: 2727 if state.unreachable { 2728 break 2729 } 2730 var lane ssa.VecLane 2731 switch vecOp { 2732 case wasm.OpcodeVecF32x4Gt: 2733 lane = ssa.VecLaneF32x4 2734 case wasm.OpcodeVecF64x2Gt: 2735 lane = ssa.VecLaneF64x2 2736 } 2737 v2 := state.pop() 2738 v1 := state.pop() 2739 ret := builder.AllocateInstruction(). 2740 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThan, lane).Insert(builder).Return() 2741 state.push(ret) 2742 case wasm.OpcodeVecF32x4Ge, wasm.OpcodeVecF64x2Ge: 2743 if state.unreachable { 2744 break 2745 } 2746 var lane ssa.VecLane 2747 switch vecOp { 2748 case wasm.OpcodeVecF32x4Ge: 2749 lane = ssa.VecLaneF32x4 2750 case wasm.OpcodeVecF64x2Ge: 2751 lane = ssa.VecLaneF64x2 2752 } 2753 v2 := state.pop() 2754 v1 := state.pop() 2755 ret := builder.AllocateInstruction(). 
2756 AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThanOrEqual, lane).Insert(builder).Return() 2757 state.push(ret) 2758 case wasm.OpcodeVecF32x4Ceil, wasm.OpcodeVecF64x2Ceil: 2759 if state.unreachable { 2760 break 2761 } 2762 var lane ssa.VecLane 2763 switch vecOp { 2764 case wasm.OpcodeVecF32x4Ceil: 2765 lane = ssa.VecLaneF32x4 2766 case wasm.OpcodeVecF64x2Ceil: 2767 lane = ssa.VecLaneF64x2 2768 } 2769 v1 := state.pop() 2770 ret := builder.AllocateInstruction().AsVCeil(v1, lane).Insert(builder).Return() 2771 state.push(ret) 2772 case wasm.OpcodeVecF32x4Floor, wasm.OpcodeVecF64x2Floor: 2773 if state.unreachable { 2774 break 2775 } 2776 var lane ssa.VecLane 2777 switch vecOp { 2778 case wasm.OpcodeVecF32x4Floor: 2779 lane = ssa.VecLaneF32x4 2780 case wasm.OpcodeVecF64x2Floor: 2781 lane = ssa.VecLaneF64x2 2782 } 2783 v1 := state.pop() 2784 ret := builder.AllocateInstruction().AsVFloor(v1, lane).Insert(builder).Return() 2785 state.push(ret) 2786 case wasm.OpcodeVecF32x4Trunc, wasm.OpcodeVecF64x2Trunc: 2787 if state.unreachable { 2788 break 2789 } 2790 var lane ssa.VecLane 2791 switch vecOp { 2792 case wasm.OpcodeVecF32x4Trunc: 2793 lane = ssa.VecLaneF32x4 2794 case wasm.OpcodeVecF64x2Trunc: 2795 lane = ssa.VecLaneF64x2 2796 } 2797 v1 := state.pop() 2798 ret := builder.AllocateInstruction().AsVTrunc(v1, lane).Insert(builder).Return() 2799 state.push(ret) 2800 case wasm.OpcodeVecF32x4Nearest, wasm.OpcodeVecF64x2Nearest: 2801 if state.unreachable { 2802 break 2803 } 2804 var lane ssa.VecLane 2805 switch vecOp { 2806 case wasm.OpcodeVecF32x4Nearest: 2807 lane = ssa.VecLaneF32x4 2808 case wasm.OpcodeVecF64x2Nearest: 2809 lane = ssa.VecLaneF64x2 2810 } 2811 v1 := state.pop() 2812 ret := builder.AllocateInstruction().AsVNearest(v1, lane).Insert(builder).Return() 2813 state.push(ret) 2814 case wasm.OpcodeVecF32x4Pmin, wasm.OpcodeVecF64x2Pmin: 2815 if state.unreachable { 2816 break 2817 } 2818 var lane ssa.VecLane 2819 switch vecOp { 2820 case wasm.OpcodeVecF32x4Pmin: 2821 lane = 
ssa.VecLaneF32x4 2822 case wasm.OpcodeVecF64x2Pmin: 2823 lane = ssa.VecLaneF64x2 2824 } 2825 v2 := state.pop() 2826 v1 := state.pop() 2827 ret := builder.AllocateInstruction().AsVMinPseudo(v1, v2, lane).Insert(builder).Return() 2828 state.push(ret) 2829 case wasm.OpcodeVecF32x4Pmax, wasm.OpcodeVecF64x2Pmax: 2830 if state.unreachable { 2831 break 2832 } 2833 var lane ssa.VecLane 2834 switch vecOp { 2835 case wasm.OpcodeVecF32x4Pmax: 2836 lane = ssa.VecLaneF32x4 2837 case wasm.OpcodeVecF64x2Pmax: 2838 lane = ssa.VecLaneF64x2 2839 } 2840 v2 := state.pop() 2841 v1 := state.pop() 2842 ret := builder.AllocateInstruction().AsVMaxPseudo(v1, v2, lane).Insert(builder).Return() 2843 state.push(ret) 2844 case wasm.OpcodeVecI32x4TruncSatF32x4S, wasm.OpcodeVecI32x4TruncSatF32x4U: 2845 if state.unreachable { 2846 break 2847 } 2848 v1 := state.pop() 2849 ret := builder.AllocateInstruction(). 2850 AsVFcvtToIntSat(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecI32x4TruncSatF32x4S).Insert(builder).Return() 2851 state.push(ret) 2852 case wasm.OpcodeVecI32x4TruncSatF64x2SZero, wasm.OpcodeVecI32x4TruncSatF64x2UZero: 2853 if state.unreachable { 2854 break 2855 } 2856 v1 := state.pop() 2857 ret := builder.AllocateInstruction(). 2858 AsVFcvtToIntSat(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecI32x4TruncSatF64x2SZero).Insert(builder).Return() 2859 state.push(ret) 2860 case wasm.OpcodeVecF32x4ConvertI32x4S, wasm.OpcodeVecF32x4ConvertI32x4U: 2861 if state.unreachable { 2862 break 2863 } 2864 v1 := state.pop() 2865 ret := builder.AllocateInstruction(). 2866 AsVFcvtFromInt(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecF32x4ConvertI32x4S).Insert(builder).Return() 2867 state.push(ret) 2868 case wasm.OpcodeVecF64x2ConvertLowI32x4S, wasm.OpcodeVecF64x2ConvertLowI32x4U: 2869 if state.unreachable { 2870 break 2871 } 2872 v1 := state.pop() 2873 v1w := builder.AllocateInstruction(). 
2874 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S, true).Insert(builder).Return() 2875 ret := builder.AllocateInstruction(). 2876 AsVFcvtFromInt(v1w, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S). 2877 Insert(builder).Return() 2878 state.push(ret) 2879 case wasm.OpcodeVecI8x16NarrowI16x8S, wasm.OpcodeVecI8x16NarrowI16x8U: 2880 if state.unreachable { 2881 break 2882 } 2883 v2 := state.pop() 2884 v1 := state.pop() 2885 ret := builder.AllocateInstruction(). 2886 AsNarrow(v1, v2, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI8x16NarrowI16x8S). 2887 Insert(builder).Return() 2888 state.push(ret) 2889 case wasm.OpcodeVecI16x8NarrowI32x4S, wasm.OpcodeVecI16x8NarrowI32x4U: 2890 if state.unreachable { 2891 break 2892 } 2893 v2 := state.pop() 2894 v1 := state.pop() 2895 ret := builder.AllocateInstruction(). 2896 AsNarrow(v1, v2, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI16x8NarrowI32x4S). 2897 Insert(builder).Return() 2898 state.push(ret) 2899 case wasm.OpcodeVecI16x8ExtendLowI8x16S, wasm.OpcodeVecI16x8ExtendLowI8x16U: 2900 if state.unreachable { 2901 break 2902 } 2903 v1 := state.pop() 2904 ret := builder.AllocateInstruction(). 2905 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendLowI8x16S, true). 2906 Insert(builder).Return() 2907 state.push(ret) 2908 case wasm.OpcodeVecI16x8ExtendHighI8x16S, wasm.OpcodeVecI16x8ExtendHighI8x16U: 2909 if state.unreachable { 2910 break 2911 } 2912 v1 := state.pop() 2913 ret := builder.AllocateInstruction(). 2914 AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendHighI8x16S, false). 2915 Insert(builder).Return() 2916 state.push(ret) 2917 case wasm.OpcodeVecI32x4ExtendLowI16x8S, wasm.OpcodeVecI32x4ExtendLowI16x8U: 2918 if state.unreachable { 2919 break 2920 } 2921 v1 := state.pop() 2922 ret := builder.AllocateInstruction(). 2923 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendLowI16x8S, true). 
2924 Insert(builder).Return() 2925 state.push(ret) 2926 case wasm.OpcodeVecI32x4ExtendHighI16x8S, wasm.OpcodeVecI32x4ExtendHighI16x8U: 2927 if state.unreachable { 2928 break 2929 } 2930 v1 := state.pop() 2931 ret := builder.AllocateInstruction(). 2932 AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendHighI16x8S, false). 2933 Insert(builder).Return() 2934 state.push(ret) 2935 case wasm.OpcodeVecI64x2ExtendLowI32x4S, wasm.OpcodeVecI64x2ExtendLowI32x4U: 2936 if state.unreachable { 2937 break 2938 } 2939 v1 := state.pop() 2940 ret := builder.AllocateInstruction(). 2941 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendLowI32x4S, true). 2942 Insert(builder).Return() 2943 state.push(ret) 2944 case wasm.OpcodeVecI64x2ExtendHighI32x4S, wasm.OpcodeVecI64x2ExtendHighI32x4U: 2945 if state.unreachable { 2946 break 2947 } 2948 v1 := state.pop() 2949 ret := builder.AllocateInstruction(). 2950 AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendHighI32x4S, false). 2951 Insert(builder).Return() 2952 state.push(ret) 2953 2954 case wasm.OpcodeVecF64x2PromoteLowF32x4Zero: 2955 if state.unreachable { 2956 break 2957 } 2958 v1 := state.pop() 2959 ret := builder.AllocateInstruction(). 2960 AsFvpromoteLow(v1, ssa.VecLaneF32x4). 2961 Insert(builder).Return() 2962 state.push(ret) 2963 case wasm.OpcodeVecF32x4DemoteF64x2Zero: 2964 if state.unreachable { 2965 break 2966 } 2967 v1 := state.pop() 2968 ret := builder.AllocateInstruction(). 2969 AsFvdemote(v1, ssa.VecLaneF64x2). 
2970 Insert(builder).Return() 2971 state.push(ret) 2972 case wasm.OpcodeVecI8x16Shl, wasm.OpcodeVecI16x8Shl, wasm.OpcodeVecI32x4Shl, wasm.OpcodeVecI64x2Shl: 2973 if state.unreachable { 2974 break 2975 } 2976 var lane ssa.VecLane 2977 switch vecOp { 2978 case wasm.OpcodeVecI8x16Shl: 2979 lane = ssa.VecLaneI8x16 2980 case wasm.OpcodeVecI16x8Shl: 2981 lane = ssa.VecLaneI16x8 2982 case wasm.OpcodeVecI32x4Shl: 2983 lane = ssa.VecLaneI32x4 2984 case wasm.OpcodeVecI64x2Shl: 2985 lane = ssa.VecLaneI64x2 2986 } 2987 v2 := state.pop() 2988 v1 := state.pop() 2989 ret := builder.AllocateInstruction().AsVIshl(v1, v2, lane).Insert(builder).Return() 2990 state.push(ret) 2991 case wasm.OpcodeVecI8x16ShrS, wasm.OpcodeVecI16x8ShrS, wasm.OpcodeVecI32x4ShrS, wasm.OpcodeVecI64x2ShrS: 2992 if state.unreachable { 2993 break 2994 } 2995 var lane ssa.VecLane 2996 switch vecOp { 2997 case wasm.OpcodeVecI8x16ShrS: 2998 lane = ssa.VecLaneI8x16 2999 case wasm.OpcodeVecI16x8ShrS: 3000 lane = ssa.VecLaneI16x8 3001 case wasm.OpcodeVecI32x4ShrS: 3002 lane = ssa.VecLaneI32x4 3003 case wasm.OpcodeVecI64x2ShrS: 3004 lane = ssa.VecLaneI64x2 3005 } 3006 v2 := state.pop() 3007 v1 := state.pop() 3008 ret := builder.AllocateInstruction().AsVSshr(v1, v2, lane).Insert(builder).Return() 3009 state.push(ret) 3010 case wasm.OpcodeVecI8x16ShrU, wasm.OpcodeVecI16x8ShrU, wasm.OpcodeVecI32x4ShrU, wasm.OpcodeVecI64x2ShrU: 3011 if state.unreachable { 3012 break 3013 } 3014 var lane ssa.VecLane 3015 switch vecOp { 3016 case wasm.OpcodeVecI8x16ShrU: 3017 lane = ssa.VecLaneI8x16 3018 case wasm.OpcodeVecI16x8ShrU: 3019 lane = ssa.VecLaneI16x8 3020 case wasm.OpcodeVecI32x4ShrU: 3021 lane = ssa.VecLaneI32x4 3022 case wasm.OpcodeVecI64x2ShrU: 3023 lane = ssa.VecLaneI64x2 3024 } 3025 v2 := state.pop() 3026 v1 := state.pop() 3027 ret := builder.AllocateInstruction().AsVUshr(v1, v2, lane).Insert(builder).Return() 3028 state.push(ret) 3029 case wasm.OpcodeVecI8x16ExtractLaneS, wasm.OpcodeVecI16x8ExtractLaneS: 3030 state.pc++ 
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16ExtractLaneS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8ExtractLaneS:
				lane = ssa.VecLaneI16x8
			}
			v1 := state.pop()
			// The lane index is a single-byte immediate following the opcode.
			index := c.wasmFunctionBody[state.pc]
			ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, true /* signed */).Insert(builder).Return()
			state.push(ext)
		case wasm.OpcodeVecI8x16ExtractLaneU, wasm.OpcodeVecI16x8ExtractLaneU,
			wasm.OpcodeVecI32x4ExtractLane, wasm.OpcodeVecI64x2ExtractLane,
			wasm.OpcodeVecF32x4ExtractLane, wasm.OpcodeVecF64x2ExtractLane:
			state.pc++ // Skip the immediate value.
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16ExtractLaneU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8ExtractLaneU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4ExtractLane:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2ExtractLane:
				lane = ssa.VecLaneI64x2
			case wasm.OpcodeVecF32x4ExtractLane:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2ExtractLane:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			index := c.wasmFunctionBody[state.pc]
			ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, false /* unsigned */).Insert(builder).Return()
			state.push(ext)
		case wasm.OpcodeVecI8x16ReplaceLane, wasm.OpcodeVecI16x8ReplaceLane,
			wasm.OpcodeVecI32x4ReplaceLane, wasm.OpcodeVecI64x2ReplaceLane,
			wasm.OpcodeVecF32x4ReplaceLane, wasm.OpcodeVecF64x2ReplaceLane:
			state.pc++ // Skip to the single-byte lane-index immediate.
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16ReplaceLane:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8ReplaceLane:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4ReplaceLane:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2ReplaceLane:
				lane = ssa.VecLaneI64x2
			case wasm.OpcodeVecF32x4ReplaceLane:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2ReplaceLane:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			index := c.wasmFunctionBody[state.pc]
			ret := builder.AllocateInstruction().AsInsertlane(v1, v2, index, lane).Insert(builder).Return()
			state.push(ret)
		case wasm.OpcodeVecV128i8x16Shuffle:
			state.pc++
			// 16 lane-index bytes follow the opcode; the final pc++ at the
			// bottom of the loop accounts for the 16th byte, hence +=15 here.
			laneIndexes := c.wasmFunctionBody[state.pc : state.pc+16]
			state.pc += 15
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsShuffle(v1, v2, laneIndexes).Insert(builder).Return()
			state.push(ret)

		case wasm.OpcodeVecI8x16Swizzle:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsSwizzle(v1, v2, ssa.VecLaneI8x16).Insert(builder).Return()
			state.push(ret)

		case wasm.OpcodeVecI8x16Splat,
			wasm.OpcodeVecI16x8Splat,
			wasm.OpcodeVecI32x4Splat,
			wasm.OpcodeVecI64x2Splat,
			wasm.OpcodeVecF32x4Splat,
			wasm.OpcodeVecF64x2Splat:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Splat:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Splat:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Splat:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Splat:
				lane = ssa.VecLaneI64x2
			case wasm.OpcodeVecF32x4Splat:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Splat:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsSplat(v1, lane).Insert(builder).Return()
			state.push(ret)

		default:
			panic("TODO: unsupported vector instruction: " + wasm.VectorInstructionName(vecOp))
		}
	case wasm.OpcodeRefFunc:
		funcIndex := c.readI32u()
		if state.unreachable {
			break
		}

		// ref.func is implemented via a call into the runtime (Go code), so the
		// caller's module context must be observable from the execution context.
		c.storeCallerModuleContext()

		funcIndexVal := builder.AllocateInstruction().AsIconst32(funcIndex).Insert(builder).Return()

		refFuncPtr := builder.AllocateInstruction().
			AsLoad(c.execCtxPtrValue,
				wazevoapi.ExecutionContextOffsetRefFuncTrampolineAddress.U32(),
				ssa.TypeI64,
			).Insert(builder).Return()

		// TODO: reuse the slice.
		args := []ssa.Value{c.execCtxPtrValue, funcIndexVal}
		refFuncRet := builder.
			AllocateInstruction().
			AsCallIndirect(refFuncPtr, &c.refFuncSig, args).
			Insert(builder).Return()
		state.push(refFuncRet)

	case wasm.OpcodeRefNull:
		c.loweringState.pc++ // skips the reference type as we treat both of them as i64(0).
		if state.unreachable {
			break
		}
		ret := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
		state.push(ret)
	case wasm.OpcodeRefIsNull:
		if state.unreachable {
			break
		}
		// Null references are represented as i64(0), so ref.is_null is a
		// simple equality comparison against zero.
		r := state.pop()
		zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder)
		icmp := builder.AllocateInstruction().
			AsIcmp(r, zero.Return(), ssa.IntegerCmpCondEqual).
			Insert(builder).
			Return()
		state.push(icmp)
	case wasm.OpcodeTableSet:
		tableIndex := c.readI32u()
		if state.unreachable {
			break
		}
		r := state.pop()
		targetOffsetInTable := state.pop()

		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
		builder.AllocateInstruction().AsStore(ssa.OpcodeStore, r, elementAddr, 0).Insert(builder)

	case wasm.OpcodeTableGet:
		tableIndex := c.readI32u()
		if state.unreachable {
			break
		}
		targetOffsetInTable := state.pop()
		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
		loaded := builder.AllocateInstruction().AsLoad(elementAddr, 0, ssa.TypeI64).Insert(builder).Return()
		state.push(loaded)
	default:
		panic("TODO: unsupported in wazevo yet: " + wasm.InstructionName(op))
	}

	if wazevoapi.FrontEndLoggingEnabled {
		fmt.Println("--------- Translated " + wasm.InstructionName(op) + " --------")
		fmt.Println("state: " + c.loweringState.String())
		fmt.Println(c.formatBuilder())
		fmt.Println("--------------------------")
	}
	c.loweringState.pc++
}

// lowerExtMul lowers the extended-multiplication vector opcodes by widening
// the selected halves of both operands from `from` lanes to `to` lanes and
// multiplying the widened results.
func (c *Compiler) lowerExtMul(v1, v2 ssa.Value, from, to ssa.VecLane, signed, low bool) ssa.Value {
	// TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs.
	builder := c.ssaBuilder

	v1lo := builder.AllocateInstruction().AsWiden(v1, from, signed, low).Insert(builder).Return()
	v2lo := builder.AllocateInstruction().AsWiden(v2, from, signed, low).Insert(builder).Return()

	return builder.AllocateInstruction().AsVImul(v1lo, v2lo, to).Insert(builder).Return()
}

// Byte offsets of the fields of wasm.TableInstance as laid out in memory.
const (
	tableInstanceBaseAddressOffset = 0
	tableInstanceLenOffset         = tableInstanceBaseAddressOffset + 8
)

// lowerAccessTableWithBoundsCheck emits a bounds check of elementOffsetInTable
// against the table's current length (trapping with ExitCodeTableOutOfBounds
// on failure) and returns the address of the selected element slot.
func (c *Compiler) lowerAccessTableWithBoundsCheck(tableIndex uint32, elementOffsetInTable ssa.Value) (elementAddress ssa.Value) {
	builder := c.ssaBuilder

	// Load the table.
	loadTableInstancePtr := builder.AllocateInstruction()
	loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64)
	builder.InsertInstruction(loadTableInstancePtr)
	tableInstancePtr := loadTableInstancePtr.Return()

	// Load the table's length.
	loadTableLen := builder.AllocateInstruction()
	loadTableLen.AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32)
	builder.InsertInstruction(loadTableLen)
	tableLen := loadTableLen.Return()

	// Compare the length and the target, and trap if out of bounds.
	checkOOB := builder.AllocateInstruction()
	checkOOB.AsIcmp(elementOffsetInTable, tableLen, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual)
	builder.InsertInstruction(checkOOB)
	exitIfOOB := builder.AllocateInstruction()
	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
	builder.InsertInstruction(exitIfOOB)

	// Get the base address of wasm.TableInstance.References.
	loadTableBaseAddress := builder.AllocateInstruction()
	loadTableBaseAddress.AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64)
	builder.InsertInstruction(loadTableBaseAddress)
	tableBase := loadTableBaseAddress.Return()

	// Calculate the address of the target function. First we need to multiply targetOffsetInTable by 8 (pointer size).
	multiplyBy8 := builder.AllocateInstruction()
	three := builder.AllocateInstruction()
	three.AsIconst64(3)
	builder.InsertInstruction(three)
	multiplyBy8.AsIshl(elementOffsetInTable, three.Return()) // <<3 == *8.
	builder.InsertInstruction(multiplyBy8)
	targetOffsetInTableMultipliedBy8 := multiplyBy8.Return()

	// Then add the multiplied value to the base which results in the address of the target function (*wazevo.functionInstance)
	calcElementAddressInTable := builder.AllocateInstruction()
	calcElementAddressInTable.AsIadd(tableBase, targetOffsetInTableMultipliedBy8)
	builder.InsertInstruction(calcElementAddressInTable)
	return calcElementAddressInTable.Return()
}

// lowerCallIndirect lowers the call_indirect instruction: it bounds-checks the
// table access, null-checks the function instance, verifies the expected type
// ID, and finally emits the indirect call, pushing any results onto the stack.
func (c *Compiler) lowerCallIndirect(typeIndex, tableIndex uint32) {
	builder := c.ssaBuilder
	state := c.state()

	elementOffsetInTable := state.pop()
	functionInstancePtrAddress := c.lowerAccessTableWithBoundsCheck(tableIndex, elementOffsetInTable)
	loadFunctionInstancePtr := builder.AllocateInstruction()
	loadFunctionInstancePtr.AsLoad(functionInstancePtrAddress, 0, ssa.TypeI64)
	builder.InsertInstruction(loadFunctionInstancePtr)
	functionInstancePtr := loadFunctionInstancePtr.Return()

	// Check if it is not the null pointer.
	zero := builder.AllocateInstruction()
	zero.AsIconst64(0)
	builder.InsertInstruction(zero)
	checkNull := builder.AllocateInstruction()
	checkNull.AsIcmp(functionInstancePtr, zero.Return(), ssa.IntegerCmpCondEqual)
	builder.InsertInstruction(checkNull)
	exitIfNull := builder.AllocateInstruction()
	exitIfNull.AsExitIfTrueWithCode(c.execCtxPtrValue, checkNull.Return(), wazevoapi.ExitCodeIndirectCallNullPointer)
	builder.InsertInstruction(exitIfNull)

	// We need to do the type check. First, load the target function instance's typeID.
	loadTypeID := builder.AllocateInstruction()
	loadTypeID.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceTypeIDOffset, ssa.TypeI32)
	builder.InsertInstruction(loadTypeID)
	actualTypeID := loadTypeID.Return()

	// Next, we load the expected TypeID:
	loadTypeIDsBegin := builder.AllocateInstruction()
	loadTypeIDsBegin.AsLoad(c.moduleCtxPtrValue, c.offset.TypeIDs1stElement.U32(), ssa.TypeI64)
	builder.InsertInstruction(loadTypeIDsBegin)
	typeIDsBegin := loadTypeIDsBegin.Return()

	loadExpectedTypeID := builder.AllocateInstruction()
	loadExpectedTypeID.AsLoad(typeIDsBegin, uint32(typeIndex)*4 /* size of wasm.FunctionTypeID */, ssa.TypeI32)
	builder.InsertInstruction(loadExpectedTypeID)
	expectedTypeID := loadExpectedTypeID.Return()

	// Check if the type ID matches.
	checkTypeID := builder.AllocateInstruction()
	checkTypeID.AsIcmp(actualTypeID, expectedTypeID, ssa.IntegerCmpCondNotEqual)
	builder.InsertInstruction(checkTypeID)
	exitIfNotMatch := builder.AllocateInstruction()
	exitIfNotMatch.AsExitIfTrueWithCode(c.execCtxPtrValue, checkTypeID.Return(), wazevoapi.ExitCodeIndirectCallTypeMismatch)
	builder.InsertInstruction(exitIfNotMatch)

	// Now ready to call the function. Load the executable and moduleContextOpaquePtr from the function instance.
	loadExecutablePtr := builder.AllocateInstruction()
	loadExecutablePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceExecutableOffset, ssa.TypeI64)
	builder.InsertInstruction(loadExecutablePtr)
	executablePtr := loadExecutablePtr.Return()
	loadModuleContextOpaquePtr := builder.AllocateInstruction()
	loadModuleContextOpaquePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceModuleContextOpaquePtrOffset, ssa.TypeI64)
	builder.InsertInstruction(loadModuleContextOpaquePtr)
	moduleContextOpaquePtr := loadModuleContextOpaquePtr.Return()

	// TODO: reuse slice?
	typ := &c.m.TypeSection[typeIndex]
	argN := len(typ.Params)
	args := make([]ssa.Value, argN+2)
	args[0] = c.execCtxPtrValue
	args[1] = moduleContextOpaquePtr
	state.nPopInto(argN, args[2:])

	// Before transfer the control to the callee, we have to store the current module's moduleContextPtr
	// into execContext.callerModuleContextPtr in case when the callee is a Go function.
	c.storeCallerModuleContext()

	call := builder.AllocateInstruction()
	call.AsCallIndirect(executablePtr, c.signatures[typ], args)
	builder.InsertInstruction(call)

	first, rest := call.Returns()
	if first.Valid() {
		state.push(first)
	}
	for _, v := range rest {
		state.push(v)
	}

	c.reloadAfterCall()
}

// memOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores).
func (c *Compiler) memOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) {
	builder := c.ssaBuilder

	ceil := constOffset + operationSizeInBytes
	ceilConst := builder.AllocateInstruction()
	ceilConst.AsIconst64(ceil)
	builder.InsertInstruction(ceilConst)

	// We calculate the offset in 64-bit space.
	extBaseAddr := builder.AllocateInstruction()
	extBaseAddr.AsUExtend(baseAddr, 32, 64)
	builder.InsertInstruction(extBaseAddr)

	// Note: memLen is already zero extended to 64-bit space at the load time.
	memLen := c.getMemoryLenValue(false)

	// baseAddrPlusCeil = baseAddr + ceil
	baseAddrPlusCeil := builder.AllocateInstruction()
	baseAddrPlusCeil.AsIadd(extBaseAddr.Return(), ceilConst.Return())
	builder.InsertInstruction(baseAddrPlusCeil)

	// Trap if the access is out of bounds, i.e. memLen < baseAddrPlusCeil.
	cmp := builder.AllocateInstruction()
	cmp.AsIcmp(memLen, baseAddrPlusCeil.Return(), ssa.IntegerCmpCondUnsignedLessThan)
	builder.InsertInstruction(cmp)
	exitIfNZ := builder.AllocateInstruction()
	exitIfNZ.AsExitIfTrueWithCode(c.execCtxPtrValue, cmp.Return(), wazevoapi.ExitCodeMemoryOutOfBounds)
	builder.InsertInstruction(exitIfNZ)

	// The effective address of the operation is memBase + extBaseAddr.
	memBase := c.getMemoryBaseValue(false)
	addrCalc := builder.AllocateInstruction()
	addrCalc.AsIadd(memBase, extBaseAddr.Return())
	builder.InsertInstruction(addrCalc)
	return addrCalc.Return()
}

// callMemmove emits an indirect call to the runtime's memmove helper with the
// given destination, source, and byte count.
func (c *Compiler) callMemmove(dst, src, size ssa.Value) {
	args := []ssa.Value{dst, src, size} // TODO: reuse the slice.

	builder := c.ssaBuilder
	memmovePtr := builder.AllocateInstruction().
		AsLoad(c.execCtxPtrValue,
			wazevoapi.ExecutionContextOffsetMemmoveAddress.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()
	builder.AllocateInstruction().AsCallIndirect(memmovePtr, &c.memmoveSig, args).Insert(builder)
}

// reloadAfterCall re-defines the cached memory base/length and mutable global
// values after a call, since the callee may have changed them.
func (c *Compiler) reloadAfterCall() {
	// Note that when these are not used in the following instructions, they will be optimized out.
	// So in any ways, we define them!

	// After calling any function, the memory buffer might have changed. So we need to re-define the variables.
	if c.needMemory {
		c.reloadMemoryBaseLen()
	}

	// Also, any mutable Global can change.
	for _, index := range c.mutableGlobalVariablesIndexes {
		_ = c.getWasmGlobalValue(index, true)
	}
}

// reloadMemoryBaseLen forces a reload of the memory base pointer and length.
func (c *Compiler) reloadMemoryBaseLen() {
	_ = c.getMemoryBaseValue(true)
	_ = c.getMemoryLenValue(true)
}

// globalInstanceValueOffset is the offset of the .Value field of wasm.GlobalInstance.
const globalInstanceValueOffset = 8

// setWasmGlobalValue stores v into the global at the given index and records
// the new value in the SSA variable tracking that global.
func (c *Compiler) setWasmGlobalValue(index wasm.Index, v ssa.Value) {
	variable := c.globalVariables[index]
	instanceOffset := c.offset.GlobalInstanceOffset(index)

	builder := c.ssaBuilder
	loadGlobalInstPtr := builder.AllocateInstruction()
	loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(instanceOffset), ssa.TypeI64)
	builder.InsertInstruction(loadGlobalInstPtr)

	store := builder.AllocateInstruction()
	store.AsStore(ssa.OpcodeStore, v, loadGlobalInstPtr.Return(), uint32(globalInstanceValueOffset))
	builder.InsertInstruction(store)

	// The value has changed to `v`, so we record it.
	builder.DefineVariableInCurrentBB(variable, v)
}

// getWasmGlobalValue returns the SSA value of the global at the given index,
// reusing a previously loaded value unless forceLoad is true.
func (c *Compiler) getWasmGlobalValue(index wasm.Index, forceLoad bool) ssa.Value {
	variable := c.globalVariables[index]
	typ := c.globalVariablesTypes[index]
	instanceOffset := c.offset.GlobalInstanceOffset(index)

	builder := c.ssaBuilder
	if !forceLoad {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	loadGlobalInstPtr := builder.AllocateInstruction()
	loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(instanceOffset), ssa.TypeI64)
	builder.InsertInstruction(loadGlobalInstPtr)

	load := builder.AllocateInstruction()
	load.AsLoad(loadGlobalInstPtr.Return(), uint32(globalInstanceValueOffset), typ)
	builder.InsertInstruction(load)
	ret := load.Return()
	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}

// Byte offsets of the fields of wasm.MemoryInstance as laid out in memory.
const (
	memoryInstanceBufOffset     = 0
	memoryInstanceBufSizeOffset = memoryInstanceBufOffset + 8
)

// getMemoryBaseValue returns the memory buffer's base pointer, loading it from
// the local or imported memory instance unless a cached value can be reused.
func (c *Compiler) getMemoryBaseValue(forceReload bool) ssa.Value {
	builder := c.ssaBuilder
	variable := c.memoryBaseVariable
	if !forceReload {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var ret ssa.Value
	if c.offset.LocalMemoryBegin < 0 {
		// Imported memory: go through the memory instance pointer.
		loadMemInstPtr := builder.AllocateInstruction()
		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
		builder.InsertInstruction(loadMemInstPtr)
		memInstPtr := loadMemInstPtr.Return()

		loadBufPtr := builder.AllocateInstruction()
		loadBufPtr.AsLoad(memInstPtr, memoryInstanceBufOffset, ssa.TypeI64)
		builder.InsertInstruction(loadBufPtr)
		ret = loadBufPtr.Return()
	} else {
		// Local memory: the base pointer lives directly in the module context.
		load := builder.AllocateInstruction()
		load.AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryBase().U32(), ssa.TypeI64)
		builder.InsertInstruction(load)
		ret = load.Return()
	}

	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}

// getMemoryLenValue returns the memory buffer's length (zero-extended to i64),
// loading it unless a cached value can be reused.
func (c *Compiler) getMemoryLenValue(forceReload bool) ssa.Value {
	variable := c.memoryLenVariable
	builder := c.ssaBuilder
	if !forceReload {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var ret ssa.Value
	if c.offset.LocalMemoryBegin < 0 {
		// Imported memory: go through the memory instance pointer.
		loadMemInstPtr := builder.AllocateInstruction()
		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
		builder.InsertInstruction(loadMemInstPtr)
		memInstPtr := loadMemInstPtr.Return()

		loadBufSizePtr := builder.AllocateInstruction()
		loadBufSizePtr.AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI64)
		builder.InsertInstruction(loadBufSizePtr)

		ret = loadBufSizePtr.Return()
	} else {
		// Local memory: load the 32-bit length and zero-extend it to 64-bit.
		load := builder.AllocateInstruction()
		load.AsExtLoad(ssa.OpcodeUload32, c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), true)
		builder.InsertInstruction(load)
		ret = load.Return()
	}

	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}

// insertIcmp pops two operands and pushes the result of the integer comparison.
func (c *Compiler) insertIcmp(cond ssa.IntegerCmpCond) {
	state, builder := c.state(), c.ssaBuilder
	y, x := state.pop(), state.pop()
	cmp := builder.AllocateInstruction()
	cmp.AsIcmp(x, y, cond)
	builder.InsertInstruction(cmp)
	value := cmp.Return()
	state.push(value)
}

// insertFcmp pops two operands and pushes the result of the float comparison.
func (c *Compiler) insertFcmp(cond ssa.FloatCmpCond) {
	state, builder := c.state(), c.ssaBuilder
	y, x := state.pop(), state.pop()
	cmp := builder.AllocateInstruction()
	cmp.AsFcmp(x, y, cond)
	builder.InsertInstruction(cmp)
	value := cmp.Return()
	state.push(value)
}

// storeCallerModuleContext stores the current module's moduleContextPtr into execContext.callerModuleContextPtr.
func (c *Compiler) storeCallerModuleContext() {
	builder := c.ssaBuilder
	execCtx := c.execCtxPtrValue
	store := builder.AllocateInstruction()
	store.AsStore(ssa.OpcodeStore,
		c.moduleCtxPtrValue, execCtx, wazevoapi.ExecutionContextOffsetCallerModuleContextPtr.U32())
	builder.InsertInstruction(store)
}

// readI32u reads a LEB128-encoded uint32 immediate and advances the pc past it.
func (c *Compiler) readI32u() uint32 {
	v, n, err := leb128.LoadUint32(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readI32s reads a LEB128-encoded int32 immediate and advances the pc past it.
func (c *Compiler) readI32s() int32 {
	v, n, err := leb128.LoadInt32(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readI64s reads a LEB128-encoded int64 immediate and advances the pc past it.
func (c *Compiler) readI64s() int64 {
	v, n, err := leb128.LoadInt64(c.wasmFunctionBody[c.loweringState.pc+1:])
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	c.loweringState.pc += int(n)
	return v
}

// readF32 reads a little-endian float32 immediate and advances the pc past it.
func (c *Compiler) readF32() float32 {
	v := math.Float32frombits(binary.LittleEndian.Uint32(c.wasmFunctionBody[c.loweringState.pc+1:]))
	c.loweringState.pc += 4
	return v
}

// readF64 reads a little-endian float64 immediate and advances the pc past it.
func (c *Compiler) readF64() float64 {
	v := math.Float64frombits(binary.LittleEndian.Uint64(c.wasmFunctionBody[c.loweringState.pc+1:]))
	c.loweringState.pc += 8
	return v
}

// readBlockType reads the block type from the current position of the bytecode reader.
func (c *Compiler) readBlockType() *wasm.FunctionType {
	state := c.state()

	c.br.Reset(c.wasmFunctionBody[state.pc+1:])
	bt, num, err := wasm.DecodeBlockType(c.m.TypeSection, c.br, api.CoreFeaturesV2)
	if err != nil {
		panic(err) // shouldn't be reached since compilation comes after validation.
	}
	state.pc += int(num)

	return bt
}

// readMemArg reads the alignment and offset immediates of a memory instruction
// and advances the pc past them.
func (c *Compiler) readMemArg() (align, offset uint32) {
	state := c.state()

	align, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:])
	if err != nil {
		panic(fmt.Errorf("read memory align: %v", err))
	}

	state.pc += int(num)
	offset, num, err = leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:])
	if err != nil {
		panic(fmt.Errorf("read memory offset: %v", err))
	}

	state.pc += int(num)
	return align, offset
}

// insertJumpToBlock inserts a jump instruction to the given block in the current block.
func (c *Compiler) insertJumpToBlock(args []ssa.Value, targetBlk ssa.BasicBlock) {
	if targetBlk.ReturnBlock() {
		// Jumping to the return block means leaving the function, so the
		// "after" listener (if any) must fire first.
		if c.needListener {
			c.callListenerAfter()
		}
	}

	builder := c.ssaBuilder
	jmp := builder.AllocateInstruction()
	jmp.AsJump(args, targetBlk)
	builder.InsertInstruction(jmp)
}

// insertIntegerExtend pops a value and pushes its sign- or zero-extension
// from `from` bits to `to` bits.
func (c *Compiler) insertIntegerExtend(signed bool, from, to byte) {
	state := c.state()
	builder := c.ssaBuilder
	v := state.pop()
	extend := builder.AllocateInstruction()
	if signed {
		extend.AsSExtend(v, from, to)
	} else {
		extend.AsUExtend(v, from, to)
	}
	builder.InsertInstruction(extend)
	value := extend.Return()
	state.push(value)
}

// switchTo truncates the value stack to originalStackLen, makes targetBlk the
// current block, and pushes its parameters onto the stack. If targetBlk has no
// predecessors, the following code is marked unreachable.
func (c *Compiler) switchTo(originalStackLen int, targetBlk ssa.BasicBlock) {
	if targetBlk.Preds() == 0 {
		c.loweringState.unreachable = true
	}

	// Now we should adjust the stack and start translating the continuation block.
	c.loweringState.values = c.loweringState.values[:originalStackLen]

	c.ssaBuilder.SetCurrentBlock(targetBlk)

	// At this point, blocks params consist only of the Wasm-level parameters,
	// (since it's added only when we are trying to resolve variable *inside* this block).
	for i := 0; i < targetBlk.Params(); i++ {
		value := targetBlk.Param(i)
		c.loweringState.push(value)
	}
}

// cloneValuesList clones the given values list.
func cloneValuesList(in []ssa.Value) (ret []ssa.Value) {
	ret = make([]ssa.Value, len(in))
	for i := range ret {
		ret[i] = in[i]
	}
	return
}

// results returns the number of results of the current function.
func (c *Compiler) results() int {
	return len(c.wasmFunctionTyp.Results)
}

// lowerBrTable lowers the br_table instruction by emitting one trampoline
// block per label (to hold any per-target argument moves) and a br_table that
// dispatches into them based on the index value.
func (c *Compiler) lowerBrTable(labels []uint32, index ssa.Value) {
	state := c.state()
	builder := c.ssaBuilder

	f := state.ctrlPeekAt(int(labels[0]))
	var numArgs int
	if f.isLoop() {
		numArgs = len(f.blockType.Params)
	} else {
		numArgs = len(f.blockType.Results)
	}

	targets := make([]ssa.BasicBlock, len(labels))

	// We need trampoline blocks since depending on the target block structure, we might end up inserting moves before jumps,
	// which cannot be done with br_table. Instead, we can do such per-block moves in the trampoline blocks.
	// At the linking phase (very end of the backend), we can remove the unnecessary jumps, and therefore no runtime overhead.
	currentBlk := builder.CurrentBlock()
	for i, l := range labels {
		// Args are always on the top of the stack. Note that we should not share the args slice
		// among the jump instructions since the args are modified during passes (e.g. redundant phi elimination).
		args := c.loweringState.nPeekDup(numArgs)
		targetBlk, _ := state.brTargetArgNumFor(l)
		trampoline := builder.AllocateBasicBlock()
		builder.SetCurrentBlock(trampoline)
		c.insertJumpToBlock(args, targetBlk)
		targets[i] = trampoline
	}
	builder.SetCurrentBlock(currentBlk)

	// Dispatch into one of the trampoline blocks based on the index value.
	brTable := builder.AllocateInstruction()
	brTable.AsBrTable(index, targets)
	builder.InsertInstruction(brTable)

	for _, trampoline := range targets {
		builder.Seal(trampoline)
	}
}

// brTargetArgNumFor returns the branch-target block for the given relative
// label index along with the number of arguments the branch carries: a loop
// takes its params (branch to the header), any other frame takes its results.
func (l *loweringState) brTargetArgNumFor(labelIndex uint32) (targetBlk ssa.BasicBlock, argNum int) {
	targetFrame := l.ctrlPeekAt(int(labelIndex))
	if targetFrame.isLoop() {
		targetBlk, argNum = targetFrame.blk, len(targetFrame.blockType.Params)
	} else {
		targetBlk, argNum = targetFrame.followingBlock, len(targetFrame.blockType.Results)
	}
	return
}

// callListenerBefore emits an indirect call to this function's "before"
// listener trampoline, forwarding the function index and the entry parameters.
func (c *Compiler) callListenerBefore() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	beforeListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.BeforeListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	beforeListenerPtr := builder.AllocateInstruction().
		AsLoad(beforeListeners1stElement, uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).Insert(builder).Return()

	entry := builder.EntryBlock()
	ps := entry.Params()
	// TODO: reuse!
	args := make([]ssa.Value, ps)
	args[0] = c.execCtxPtrValue
	args[1] = builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return()
	for i := 2; i < ps; i++ {
		args[i] = entry.Param(i)
	}

	beforeSig := c.listenerSignatures[c.wasmFunctionTyp][0]
	builder.AllocateInstruction().
		AsCallIndirect(beforeListenerPtr, beforeSig, args).
		Insert(builder)
}

// callListenerAfter emits an indirect call to this function's "after" listener
// trampoline, forwarding the function index and the function's results.
func (c *Compiler) callListenerAfter() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	afterListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.AfterListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	afterListenerPtr := builder.AllocateInstruction().
		AsLoad(afterListeners1stElement,
			uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).
		Insert(builder).
		Return()

	afterSig := c.listenerSignatures[c.wasmFunctionTyp][1]
	results := c.loweringState.nPeekDup(c.results())

	// TODO: reuse!
	args := make([]ssa.Value, len(results)+2)
	args[0] = c.execCtxPtrValue
	args[1] = builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return()
	for i, r := range results {
		args[i+2] = r
	}
	builder.AllocateInstruction().
		AsCallIndirect(afterListenerPtr, afterSig, args).
		Insert(builder)
}

const (
	// elementOrDataInstanceLenOffset is the offset of the length field within an element/data instance.
	elementOrDataInstanceLenOffset = 8
	// elementOrDataInstanceSize is the size in bytes of one element/data instance (a slice header).
	elementOrDataInstanceSize = 24
)

// dropDataOrElementInstance inserts instructions to drop the element/data instance specified by the given index.
func (c *Compiler) dropDataOrElementInstance(index uint32, firstItemOffset wazevoapi.Offset) {
	builder := c.ssaBuilder
	instPtr := c.dataOrElementInstanceAddr(index, firstItemOffset)

	zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()

	// Clear the instance (all three words of the slice header).
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, 0).Insert(builder)
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset).Insert(builder)
	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset+8).Insert(builder)
}

// dataOrElementInstanceAddr returns the address of the index-th element/data
// instance in the array starting at firstItemOffset in the module context.
func (c *Compiler) dataOrElementInstanceAddr(index uint32, firstItemOffset wazevoapi.Offset) ssa.Value {
	builder := c.ssaBuilder

	_1stItemPtr := builder.
		AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue, firstItemOffset.U32(), ssa.TypeI64).
		Insert(builder).Return()

	// Each data/element instance is a slice, so we need to multiply index by
	// elementOrDataInstanceSize (24 bytes) to get the offset of the target instance.
	index = index * elementOrDataInstanceSize
	indexExt := builder.AllocateInstruction().AsIconst64(uint64(index)).Insert(builder).Return()
	// Then, add the offset to the address of the instance.
	instPtr := builder.AllocateInstruction().AsIadd(_1stItemPtr, indexExt).Insert(builder).Return()
	return instPtr
}

// boundsCheckInDataOrElementInstance traps with the given exit code if
// offsetInInstance+copySize exceeds the instance's length.
func (c *Compiler) boundsCheckInDataOrElementInstance(instPtr, offsetInInstance, copySize ssa.Value, exitCode wazevoapi.ExitCode) {
	builder := c.ssaBuilder
	dataInstLen := builder.AllocateInstruction().
		AsLoad(instPtr, elementOrDataInstanceLenOffset, ssa.TypeI64).
		Insert(builder).Return()
	ceil := builder.AllocateInstruction().AsIadd(offsetInInstance, copySize).Insert(builder).Return()
	cmp := builder.AllocateInstruction().
		AsIcmp(dataInstLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
		Insert(builder).
		Return()
	builder.AllocateInstruction().
		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, exitCode).
		Insert(builder)
}

// boundsCheckInTable traps with ExitCodeTableOutOfBounds if offset+size
// exceeds the table's length, and returns the table instance pointer.
func (c *Compiler) boundsCheckInTable(tableIndex uint32, offset, size ssa.Value) (tableInstancePtr ssa.Value) {
	builder := c.ssaBuilder
	dstCeil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()

	// Load the table.
	tableInstancePtr = builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64).
		Insert(builder).Return()

	// Load the table's length.
	tableLen := builder.AllocateInstruction().
		AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32).Insert(builder).Return()
	tableLenExt := builder.AllocateInstruction().AsUExtend(tableLen, 32, 64).Insert(builder).Return()

	// Compare the length and the target, and trap if out of bounds.
	checkOOB := builder.AllocateInstruction()
	checkOOB.AsIcmp(tableLenExt, dstCeil, ssa.IntegerCmpCondUnsignedLessThan)
	builder.InsertInstruction(checkOOB)
	exitIfOOB := builder.AllocateInstruction()
	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
	builder.InsertInstruction(exitIfOOB)
	return
}

// loadTableBaseAddr loads the base address of the table's references array.
func (c *Compiler) loadTableBaseAddr(tableInstancePtr ssa.Value) ssa.Value {
	builder := c.ssaBuilder
	loadTableBaseAddress := builder.
		AllocateInstruction().
		AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64).
		Insert(builder)
	return loadTableBaseAddress.Return()
}

// boundsCheckInMemory traps with ExitCodeMemoryOutOfBounds if offset+size
// exceeds memLen.
func (c *Compiler) boundsCheckInMemory(memLen, offset, size ssa.Value) {
	builder := c.ssaBuilder
	ceil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()
	cmp := builder.AllocateInstruction().
		AsIcmp(memLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
		Insert(builder).
		Return()
	builder.AllocateInstruction().
		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeMemoryOutOfBounds).
		Insert(builder)
}