wa-lang.org/wazero@v1.0.2/internal/engine/compiler/impl_arm64_test.go

package compiler

import (
	"testing"
	"unsafe"

	arm64 "wa-lang.org/wazero/internal/asm/arm64"
	"wa-lang.org/wazero/internal/testing/require"
	"wa-lang.org/wazero/internal/wasm"
	"wa-lang.org/wazero/internal/wazeroir"
)

// TestArm64Compiler_indirectCallWithTargetOnCallingConvReg is the regression test for #526.
// In short, the offset register for call_indirect might be the same as
// arm64CallingConventionModuleInstanceAddressRegister, and that must not be a failure.
func TestArm64Compiler_indirectCallWithTargetOnCallingConvReg(t *testing.T) {
	env := newCompilerEnvironment()
	table := make([]wasm.Reference, 1)
	env.addTable(&wasm.TableInstance{References: table})
	// Ensure that the module instance has the type information for targetOperation.TypeIndex,
	// and that the typeID matches the type ID of table[targetOffset].
	operation := &wazeroir.OperationCallIndirect{TypeIndex: 0}
	env.module().TypeIDs = []wasm.FunctionTypeID{0}
	env.module().Engine = &moduleEngine{functions: []*function{}}

	me := env.moduleEngine()
	{ // Compiling call target.
		compiler := env.requireNewCompiler(t, newCompiler, nil)
		err := compiler.compilePreamble()
		require.NoError(t, err)
		err = compiler.compileReturnFunction()
		require.NoError(t, err)

		c, _, err := compiler.compile()
		require.NoError(t, err)

		f := &function{
			parent:                &code{codeSegment: c},
			codeInitialAddress:    uintptr(unsafe.Pointer(&c[0])),
			moduleInstanceAddress: uintptr(unsafe.Pointer(env.moduleInstance)),
			source:                &wasm.FunctionInstance{TypeID: 0},
		}
		me.functions = append(me.functions, f)
		table[0] = uintptr(unsafe.Pointer(f))
	}

	compiler := env.requireNewCompiler(t, newCompiler, &wazeroir.CompilationResult{
		Signature: &wasm.FunctionType{},
		Types:     []*wasm.FunctionType{{}},
		HasTable:  true,
	}).(*arm64Compiler)
	err := compiler.compilePreamble()
	require.NoError(t, err)

	// Place the offset into the calling-convention reserved register.
	offsetLoc := compiler.pushRuntimeValueLocationOnRegister(arm64CallingConventionModuleInstanceAddressRegister,
		runtimeValueTypeI32)
	compiler.assembler.CompileConstToRegister(arm64.MOVD, 0, offsetLoc.register)

	require.NoError(t, compiler.compileCallIndirect(operation))

	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	// Generate the code under test and run it.
	code, _, err := compiler.compile()
	require.NoError(t, err)
	env.exec(code)
}

func TestArm64Compiler_readInstructionAddress(t *testing.T) {
	env := newCompilerEnvironment()
	compiler := env.requireNewCompiler(t, newArm64Compiler, nil).(*arm64Compiler)

	err := compiler.compilePreamble()
	require.NoError(t, err)

	// Set the acquisition target instruction to the one right after RET,
	// and read its absolute address into addressReg.
	const addressReg = arm64ReservedRegisterForTemporary
	compiler.assembler.CompileReadInstructionAddress(addressReg, arm64.RET)

	// Branch to the instruction after RET below via the absolute
	// address stored in addressReg.
	compiler.assembler.CompileJumpToRegister(arm64.B, addressReg)

	// If we fail to branch, we reach here and exit with the unreachable status,
	// so the assertion below would fail.
	compiler.compileExitFromNativeCode(nativeCallStatusCodeUnreachable)

	// This becomes the read instruction target, as it is the instruction
	// right after RET. Therefore, the branch instruction above
	// must land here.
	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	code, _, err := compiler.compile()
	require.NoError(t, err)

	env.exec(code)

	require.Equal(t, nativeCallStatusCodeReturned, env.compilerStatus())
}

// getOnStackPointerCeilDeterminedCallBack implements compilerImpl.getOnStackPointerCeilDeterminedCallBack for the arm64 architecture.
func (c *arm64Compiler) getOnStackPointerCeilDeterminedCallBack() func(uint64) {
	return c.onStackPointerCeilDeterminedCallBack
}

// setStackPointerCeil implements compilerImpl.setStackPointerCeil for the arm64 architecture.
func (c *arm64Compiler) setStackPointerCeil(v uint64) {
	c.stackPointerCeil = v
}

// setRuntimeValueLocationStack implements compilerImpl.setRuntimeValueLocationStack for the arm64 architecture.
func (c *arm64Compiler) setRuntimeValueLocationStack(s *runtimeValueLocationStack) {
	c.locationStack = s
}