github.com/goshafaq/sonic@v0.0.0-20231026082336-871835fb94c6/internal/abi/abi_regabi_amd64.go

//go:build go1.17
// +build go1.17

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/** Go Internal ABI implementation
 *
 *  This module implements the function layout algorithm described by the Go internal ABI.
 *  See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
 */

package abi

import (
	"fmt"
	"reflect"

	. "github.com/chenzhuoyu/iasm/x86_64"
)

/** Frame Structure of the Generated Function
    FP  +------------------------------+
        |             . . .            |
        | 2nd reg argument spill space |
        | 1st reg argument spill space |
        | <pointer-sized alignment>    |
        |             . . .            |
        | 2nd stack-assigned result    |
        | 1st stack-assigned result    |
        | <pointer-sized alignment>    |
        |             . . .            |
        | 2nd stack-assigned argument  |
        | 1st stack-assigned argument  |
        | stack-assigned receiver      |
prev()  +------------------------------+ (Previous Frame)
                Return PC              |
size()  -------------------------------|
               Saved RBP               |
offs()  -------------------------------|
           1st Reserved Register       |
        -------------------------------|
           2nd Reserved Register       |
        -------------------------------|
           Local Variables             |
    RSP -------------------------------|↓ lower addresses
*/

// zeroRegGo is the zero register designated by the Go internal ABI on amd64:
// XMM15 is assumed to hold all zeroes at function call boundaries.
const zeroRegGo = XMM15

// iregOrderGo lists the integer argument registers in Go internal ABI order.
// The trailing comments give the C (System V) register that holds the
// argument at the same position, i.e. iregOrderGo[i] pairs with iregOrderC[i].
var iregOrderGo = [...]Register64{
	RAX, // RDI
	RBX, // RSI
	RCX, // RDX
	RDI, // RCX
	RSI, // R8
	R8,  // R9
	R9,
	R10,
	R11,
}

// xregOrderGo lists the floating-point argument registers in Go internal ABI
// order. XMM15 is excluded because it is reserved as the zero register.
var xregOrderGo = [...]XMMRegister{
	XMM0,
	XMM1,
	XMM2,
	XMM3,
	XMM4,
	XMM5,
	XMM6,
	XMM7,
	XMM8,
	XMM9,
	XMM10,
	XMM11,
	XMM12,
	XMM13,
	XMM14,
}

// ReservedRegs returns the registers that must be saved and restored around
// the generated code: R14 (current goroutine) and R15 (GOT base) are fixed by
// the Go internal ABI. Nothing needs to be reserved around C calls, where the
// System V ABI already treats R14 and R15 as callee-saved.
func ReservedRegs(callc bool) []Register {
	if callc {
		return nil
	}
	return []Register{
		R14, // current goroutine
		R15, // GOT reference
	}
}

type stackAlloc struct {
	s uint32 // current stack offset, in bytes
	i int    // index of the next free integer register
	x int    // index of the next free XMM register
}

func (self *stackAlloc) reset() {
	self.i, self.x = 0, 0
}

func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
	p = mkIReg(vt, iregOrderGo[self.i])
	self.i++
	return
}

func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
	p = mkXReg(vt, xregOrderGo[self.x])
	self.x++
	return
}

func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
	p = mkStack(vt, self.s)
	self.s += uint32(vt.Size())
	return
}

func (self *stackAlloc) spill(n uint32, a int) uint32 {
	self.s = alignUp(self.s, a) + n
	return self.s
}
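
// Worked example (illustrative only), assuming alignUp(v, a) rounds v up to
// the next multiple of a: starting from s == 5, spill(8, 8) first aligns s up
// to 8, then adds the 8-byte slot, leaving s == 16 and returning the new
// high-water mark.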

func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
	nb := vt.Size()
	vk := vt.Kind()

	/* zero-sized objects are allocated on the stack */
	if nb == 0 {
		return append(p, mkStack(intType, self.s))
	}

	/* check for value type */
	switch vk {
	case reflect.Bool:
		return self.valloc(p, reflect.TypeOf(false))
	case reflect.Int:
		return self.valloc(p, intType)
	case reflect.Int8:
		return self.valloc(p, reflect.TypeOf(int8(0)))
	case reflect.Int16:
		return self.valloc(p, reflect.TypeOf(int16(0)))
	case reflect.Int32:
		return self.valloc(p, reflect.TypeOf(int32(0)))
	case reflect.Int64:
		return self.valloc(p, reflect.TypeOf(int64(0)))
	case reflect.Uint:
		return self.valloc(p, reflect.TypeOf(uint(0)))
	case reflect.Uint8:
		return self.valloc(p, reflect.TypeOf(uint8(0)))
	case reflect.Uint16:
		return self.valloc(p, reflect.TypeOf(uint16(0)))
	case reflect.Uint32:
		return self.valloc(p, reflect.TypeOf(uint32(0)))
	case reflect.Uint64:
		return self.valloc(p, reflect.TypeOf(uint64(0)))
	case reflect.Uintptr:
		return self.valloc(p, reflect.TypeOf(uintptr(0)))
	case reflect.Float32:
		return self.valloc(p, reflect.TypeOf(float32(0)))
	case reflect.Float64:
		return self.valloc(p, reflect.TypeOf(float64(0)))
	case reflect.Complex64:
		panic("abi: go117: not implemented: complex64")
	case reflect.Complex128:
		panic("abi: go117: not implemented: complex128")
	case reflect.Array:
		panic("abi: go117: not implemented: arrays")
	case reflect.Chan:
		return self.valloc(p, reflect.TypeOf((chan int)(nil)))
	case reflect.Func:
		return self.valloc(p, reflect.TypeOf((func())(nil)))
	case reflect.Map:
		return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
	case reflect.Ptr:
		return self.valloc(p, reflect.TypeOf((*int)(nil)))
	case reflect.UnsafePointer:
		return self.valloc(p, ptrType)
	case reflect.Interface:
		return self.valloc(p, ptrType, ptrType)
	case reflect.Slice:
		return self.valloc(p, ptrType, intType, intType)
	case reflect.String:
		return self.valloc(p, ptrType, intType)
	case reflect.Struct:
		panic("abi: go117: not implemented: structs")
	default:
		panic("abi: invalid value type")
	}
}
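
// Illustration: composite values are flattened into machine words before
// assignment, e.g. a string becomes (data pointer, length) and a slice
// becomes (data pointer, length, capacity); valloc then assigns each word to
// a register or to the stack individually.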

func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
	for _, vt := range vts {
		enum := isFloat(vt)
		if enum != notFloatKind && self.x < len(xregOrderGo) {
			p = append(p, self.xreg(vt))
		} else if enum == notFloatKind && self.i < len(iregOrderGo) {
			p = append(p, self.ireg(vt))
		} else {
			p = append(p, self.stack(vt))
		}
	}
	return p
}

func NewFunctionLayout(ft reflect.Type) FunctionLayout {
	var sa stackAlloc
	var fn FunctionLayout

	/* assign every argument */
	for i := 0; i < ft.NumIn(); i++ {
		fn.Args = sa.alloc(fn.Args, ft.In(i))
	}

	/* reset the register counters before assigning results */
	sa.reset()

	/* assign every return value */
	for i := 0; i < ft.NumOut(); i++ {
		fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
	}

	/* add a pointer-sized alignment field after the stack-assigned results */
	sa.spill(0, PtrAlign)

	/* assign spill slots for register-assigned arguments */
	for i := 0; i < len(fn.Args); i++ {
		if fn.Args[i].InRegister {
			fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
		}
	}

	/* add the final pointer alignment field */
	fn.FP = sa.spill(0, PtrAlign)
	return fn
}
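
// Sketch of the resulting layout (following the rules above): for a signature
// like
//     func(a int, b float64) string
// a is assigned to RAX and b to XMM0, the two words of the string result go
// to RAX and RBX, pointer-sized spill slots for a and b land at offsets 0 and
// 8, and the frame pointer offset (FP) comes out to 16.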

func (self *Frame) emitExchangeArgs(p *Program) {
	iregArgs := make([]Parameter, 0, len(self.desc.Args))
	xregArgs := 0
	for _, v := range self.desc.Args {
		if v.InRegister {
			if v.IsFloat != notFloatKind {
				xregArgs += 1
			} else {
				iregArgs = append(iregArgs, v)
			}
		} else {
			panic("stack-assigned arguments are not supported yet")
		}
	}
	if xregArgs > len(xregOrderC) {
		panic("too many floating-point arguments, at most 8 floating-point register arguments are supported for now")
	}

	switch len(iregArgs) {
	case 0, 1, 2, 3:
		{
			// Fast path: with fewer than four integer arguments, the Go source
			// registers (RAX, RBX, RCX) do not overlap the C destination
			// registers (RDI, RSI, RDX), so they can be moved directly.
			for i := 0; i < len(iregArgs); i++ {
				p.MOVQ(iregOrderGo[i], iregOrderC[i])
			}
		}
	case 4, 5, 6:
		{
			// From the 4th argument on, the Go registers (RDI, RSI, ...) collide
			// with the C destinations, so spill them to the stack before the exchange.
			for i := 3; i < len(iregArgs); i++ {
				arg := iregArgs[i]
				// pointer arguments have already been spilled
				if !arg.IsPointer {
					p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev()+arg.Mem)))
				}
			}
			p.MOVQ(iregOrderGo[0], iregOrderC[0])
			p.MOVQ(iregOrderGo[1], iregOrderC[1])
			p.MOVQ(iregOrderGo[2], iregOrderC[2])
			for i := 3; i < len(iregArgs); i++ {
				arg := iregArgs[i]
				p.MOVQ(Ptr(RSP, int32(self.Prev()+arg.Mem)), iregOrderC[i])
			}
		}
	default:
		panic("too many arguments, at most 6 integer register arguments are supported for now")
	}
}
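
// Illustration: with six integer arguments, Go passes them in RAX, RBX, RCX,
// RDI, RSI, R8 while C expects them in RDI, RSI, RDX, RCX, R8, R9. Writing
// RAX into RDI first would clobber the 4th Go argument, which is why
// arguments 3..5 are parked in their spill slots before the register moves.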

// emitStackCheck emits the standard stack-bound check: it computes the
// prospective stack pointer after reserving the frame plus maxStack bytes,
// compares it against g.stackguard0 (g lives in R14), and jumps to `to`
// (the stack-growth path) when the new stack pointer does not clear the guard.
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
	p.LEAQ(Ptr(RSP, int32(-(self.Size()+uint32(maxStack)))), R12)
	p.CMPQ(Ptr(R14, _G_stackguard0), R12)
	p.JBE(to)
}
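
// The emitted sequence is roughly (AT&T syntax, illustrative only):
//     leaq -(size+maxStack)(%rsp), %r12
//     cmpq stackguard0(%r14), %r12
//     jbe  <to>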

// StackCheckTextSize assembles a representative stack-check sequence and
// returns its encoded size in bytes.
func (self *Frame) StackCheckTextSize() uint32 {
	p := DefaultArch.CreateProgram()
	p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
	p.CMPQ(Ptr(R14, _G_stackguard0), R12)
	to := CreateLabel("")
	p.Link(to)
	p.JBE(to)
	return uint32(len(p.Assemble(0)))
}

func (self *Frame) emitExchangeRets(p *Program) {
	if len(self.desc.Rets) > 1 {
		panic("too many results, only a single result is supported for now")
	}
	// copy a stack-assigned result from the C return register (XMM0 or RAX)
	// into its slot on the Go stack
	if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
		if self.desc.Rets[0].IsFloat == floatKind64 {
			p.MOVSD(xregOrderC[0], self.retv(0))
		} else if self.desc.Rets[0].IsFloat == floatKind32 {
			p.MOVSS(xregOrderC[0], self.retv(0))
		} else {
			p.MOVQ(RAX, self.retv(0))
		}
	}
}

func (self *Frame) emitRestoreRegs(p *Program) {
	// reload the reserved registers from their save slots
	for i, r := range ReservedRegs(self.ccall) {
		switch r.(type) {
		case Register64:
			p.MOVQ(self.resv(i), r)
		case XMMRegister:
			p.MOVSD(self.resv(i), r)
		default:
			panic(fmt.Sprintf("unsupported register type %T to reserve", r))
		}
	}
	// re-zero XMM15, which the Go ABI assumes to be zero across calls
	p.XORPS(zeroRegGo, zeroRegGo)
}