github.com/wasilibs/wazerox@v0.0.0-20240124024944-4923be63ab5f/internal/engine/compiler/impl_arm64_test.go

package compiler

import (
	"testing"
	"unsafe"

	"github.com/wasilibs/wazerox/internal/asm"
	arm64 "github.com/wasilibs/wazerox/internal/asm/arm64"
	"github.com/wasilibs/wazerox/internal/testing/require"
	"github.com/wasilibs/wazerox/internal/wasm"
	"github.com/wasilibs/wazerox/internal/wazeroir"
)

// TestArm64Compiler_indirectCallWithTargetOnCallingConvReg is the regression test for #526.
// In short, the offset register for call_indirect might be the same as
// arm64CallingConventionModuleInstanceAddressRegister, and that must not cause a failure.
func TestArm64Compiler_indirectCallWithTargetOnCallingConvReg(t *testing.T) {
	env := newCompilerEnvironment()
	table := make([]wasm.Reference, 1)
	env.addTable(&wasm.TableInstance{References: table})
	// Ensure that the module instance has the type information for the operation's TypeIndex,
	// and that the typeID matches the type ID of the table entry at the target offset.
	operation := operationPtr(wazeroir.NewOperationCallIndirect(0, 0))
	env.module().TypeIDs = []wasm.FunctionTypeID{0}
	env.module().Engine = &moduleEngine{functions: []function{}}

	me := env.moduleEngine()
	{ // Compiling call target.
		compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, nil)
		err := compiler.compilePreamble()
		require.NoError(t, err)
		err = compiler.compileReturnFunction()
		require.NoError(t, err)

		code := asm.CodeSegment{}
		defer func() { require.NoError(t, code.Unmap()) }()

		_, err = compiler.compile(code.NextCodeSection())
		require.NoError(t, err)

		executable := code.Bytes()
		makeExecutable(executable)

		f := function{
			parent:             &compiledFunction{parent: &compiledCode{executable: code}},
			codeInitialAddress: code.Addr(),
			moduleInstance:     env.moduleInstance,
		}
		me.functions = append(me.functions, f)
		table[0] = uintptr(unsafe.Pointer(&f))
	}

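	// Now compile the caller: a function that performs call_indirect with the table offset
	// living in arm64CallingConventionModuleInstanceAddressRegister.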
	compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, &wazeroir.CompilationResult{
		Types:    []wasm.FunctionType{{}},
		HasTable: true,
	}).(*arm64Compiler)
	err := compiler.compilePreamble()
	require.NoError(t, err)

	// Place the offset into the calling-convention reserved register.
	offsetLoc := compiler.pushRuntimeValueLocationOnRegister(arm64CallingConventionModuleInstanceAddressRegister,
		runtimeValueTypeI32)
	compiler.assembler.CompileConstToRegister(arm64.MOVD, 0, offsetLoc.register)

	require.NoError(t, compiler.compileCallIndirect(operation))

	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	code := asm.CodeSegment{}
	defer func() { require.NoError(t, code.Unmap()) }()

	// Generate the code under test and run.
	_, err = compiler.compile(code.NextCodeSection())
	require.NoError(t, err)
	env.exec(code.Bytes())
}

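// TestArm64Compiler_readInstructionAddress ensures that CompileReadInstructionAddress followed by
// a branch via the register lands on the intended instruction (here, the one right after RET).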
func TestArm64Compiler_readInstructionAddress(t *testing.T) {
	env := newCompilerEnvironment()
	compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newArm64Compiler, nil).(*arm64Compiler)

	err := compiler.compilePreamble()
	require.NoError(t, err)

	// Set the acquisition target instruction to the one right after RET,
	// and read its absolute address into addressReg.
	const addressReg = arm64ReservedRegisterForTemporary
	compiler.assembler.CompileReadInstructionAddress(addressReg, arm64.RET)

	// Branch to the instruction after RET below via the absolute
	// address stored in addressReg.
	compiler.assembler.CompileJumpToRegister(arm64.B, addressReg)

	// If the branch fails, we reach here and exit with the unreachable status,
	// so the assertion below would fail.
	compiler.compileExitFromNativeCode(nativeCallStatusCodeUnreachable)

	// This is the target of the instruction-address read above, as it is
	// right after RET. Therefore, the branch instruction above must land here.
	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	code := asm.CodeSegment{}
	defer func() { require.NoError(t, code.Unmap()) }()

	_, err = compiler.compile(code.NextCodeSection())
	require.NoError(t, err)
	env.exec(code.Bytes())

	require.Equal(t, nativeCallStatusCodeReturned, env.compilerStatus())
}

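// TestArm64Compiler_label ensures that label allocates the per-LabelKind slices on demand and
// tracks the maximum frame ID seen across all kinds.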
func TestArm64Compiler_label(t *testing.T) {
	c := &arm64Compiler{}
	c.label(wazeroir.NewLabel(wazeroir.LabelKindContinuation, 100))
	require.Equal(t, 100, c.frameIDMax)
	require.Equal(t, 101, len(c.labels[wazeroir.LabelKindContinuation]))

	// frameIDMax is shared across all LabelKinds, so a smaller frame ID shouldn't change it.
	c.label(wazeroir.NewLabel(wazeroir.LabelKindHeader, 2))
	require.Equal(t, 100, c.frameIDMax)
	require.Equal(t, 3, len(c.labels[wazeroir.LabelKindHeader]))
}

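// TestArm64Compiler_Init ensures that Init points locationStack at locationStackForEntrypoint and
// reuses its underlying stack allocation instead of allocating a new one.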
func TestArm64Compiler_Init(t *testing.T) {
	c := &arm64Compiler{
		locationStackForEntrypoint: newRuntimeValueLocationStack(),
		assembler:                  arm64.NewAssembler(0),
	}
	const stackCap = 12345
	c.locationStackForEntrypoint.stack = make([]runtimeValueLocation, stackCap)
	c.locationStackForEntrypoint.sp = 5555

	c.Init(&wasm.FunctionType{}, nil, false)

	// After Init, locationStack must point to locationStackForEntrypoint.
	require.Equal(t, c.locationStack, &c.locationStackForEntrypoint)
	// And the underlying stack must be reused (its capacity preserved).
	require.Equal(t, stackCap, cap(c.locationStack.stack))
	require.Equal(t, stackCap, cap(c.locationStackForEntrypoint.stack))
}

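// TestArm64Compiler_resetLabels ensures that resetLabels clears every label's state while keeping
// the already-allocated initialStack capacity of labels up to frameIDMax for reuse.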
func TestArm64Compiler_resetLabels(t *testing.T) {
	c := newArm64Compiler().(*arm64Compiler)
	nop := c.compileNOP()

	const (
		frameIDMax = 50
		capacity   = 12345
	)
	c.frameIDMax = frameIDMax
	for i := range c.labels {
		ifs := make([]arm64LabelInfo, frameIDMax*2)
		c.labels[i] = ifs
		for j := 0; j <= frameIDMax; j++ {
			ifs[j].stackInitialized = true
			ifs[j].initialInstruction = nop
			ifs[j].initialStack = newRuntimeValueLocationStack()
			ifs[j].initialStack.sp = 5555 // should be cleared via runtimeValueLocationStack's reset.
			ifs[j].initialStack.stack = make([]runtimeValueLocation, 0, capacity)
		}
	}
	c.resetLabels()
	for i := range c.labels {
		for j := 0; j < len(c.labels[i]); j++ {
			l := &c.labels[i][j]
			require.False(t, l.stackInitialized)
			require.Nil(t, l.initialInstruction)
			require.Equal(t, 0, len(l.initialStack.stack))
			if j > frameIDMax {
				require.Equal(t, 0, cap(l.initialStack.stack))
			} else {
				require.Equal(t, capacity, cap(l.initialStack.stack))
			}
			require.Equal(t, uint64(0), l.initialStack.sp)
		}
	}
}

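// TestArm64Compiler_getSavedTemporaryLocationStack ensures that getSavedTemporaryLocationStack
// copies only the entries below the current stack pointer, reusing brTableTmp whenever it is
// already large enough.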
func TestArm64Compiler_getSavedTemporaryLocationStack(t *testing.T) {
	t.Run("len(brTableTmp)<len(current)", func(t *testing.T) {
		st := newRuntimeValueLocationStack()
		c := &arm64Compiler{locationStack: &st}

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{{stackPointer: 150}, {stackPointer: 200}, {stackPointer: 300}}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, 3, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack)
	})
	t.Run("len(brTableTmp)==len(current)", func(t *testing.T) {
		st := newRuntimeValueLocationStack()
		c := &arm64Compiler{locationStack: &st, brTableTmp: make([]runtimeValueLocation, 3)}
		initSlicePtr := &c.brTableTmp

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{{stackPointer: 150}, {stackPointer: 200}, {stackPointer: 300}}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, 3, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack)
		// The underlying temporary slice shouldn't be changed.
		require.Equal(t, initSlicePtr, &c.brTableTmp)
	})

	t.Run("len(brTableTmp)>len(current)", func(t *testing.T) {
		const temporarySliceSize = 100
		st := newRuntimeValueLocationStack()
		c := &arm64Compiler{locationStack: &st, brTableTmp: make([]runtimeValueLocation, temporarySliceSize)}

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{
			{stackPointer: 150},
			{stackPointer: 200},
			{stackPointer: 300},
			{},
			{},
			{},
			{},
			{stackPointer: 1231455}, // Entries here shouldn't be copied as they are above sp.
		}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, temporarySliceSize, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack[:3])
		for i := int(actual.sp); i < len(actual.stack); i++ {
			// Above the stack pointer, the values must not be copied.
			require.Zero(t, actual.stack[i].stackPointer)
		}
	})
}

// https://github.com/tetratelabs/wazero/issues/1522
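// TestArm64Compiler_LargeTrapOffsets is the regression test for the issue linked above: branches to
// the trap (exit) code must still assemble correctly when the relative offsets to their targets grow
// too large for a single branch immediate.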
func TestArm64Compiler_LargeTrapOffsets(t *testing.T) {
	env := newCompilerEnvironment()
	compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, &wazeroir.CompilationResult{
		Types: []wasm.FunctionType{{}},
	})
	err := compiler.compilePreamble()
	require.NoError(t, err)

	one := operationPtr(wazeroir.NewOperationConstI32(uint32(1)))
	five := operationPtr(wazeroir.NewOperationConstI32(uint32(5)))
	div := operationPtr(wazeroir.NewOperationDiv(wazeroir.SignedTypeInt32))

	// Push the initial dividend constant.
	err = compiler.compileConstI32(one)
	require.NoError(t, err)

	// Repeat enough times that the jump labels end up outside the (-524288, 524287) range,
	// e.g. a relative offset of -2097164/4 = -524291. At the time of writing, 52429 iterations
	// is empirically the point where the bug starts to trigger on arm64; we use an arbitrarily
	// higher value to account for possible future reductions in the number of instructions we
	// emit.
	for i := 0; i < 80_000; i++ {
		err = compiler.compileConstI32(five)
		require.NoError(t, err)

		err = compiler.compileDiv(div)
		require.NoError(t, err)
	}

	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	code := asm.CodeSegment{}
	defer func() { require.NoError(t, code.Unmap()) }()

	// Generate the code under test and run.
	_, err = compiler.compile(code.NextCodeSection())
	require.NoError(t, err)

	env.exec(code.Bytes())

	require.Equal(t, nativeCallStatusCodeReturned.String(), env.compilerStatus().String())
}

// setStackPointerCeil implements compilerImpl.setStackPointerCeil for the arm64 architecture.
func (c *arm64Compiler) setStackPointerCeil(v uint64) {
	c.stackPointerCeil = v
}

// setRuntimeValueLocationStack implements compilerImpl.setRuntimeValueLocationStack for the arm64 architecture.
func (c *arm64Compiler) setRuntimeValueLocationStack(s *runtimeValueLocationStack) {
	c.locationStack = s
}