github.com/go-asm/go@v1.21.1-0.20240213172139-40c5ead50c48/cmd/asm/arch/arm64.go (about)

     1  // Copyright 2015 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // This file encapsulates some of the odd characteristics of the ARM64
     6  // instruction set, to minimize its interaction with the core of the
     7  // assembler.
     8  
     9  package arch
    10  
    11  import (
    12  	"errors"
    13  
    14  	"github.com/go-asm/go/cmd/obj"
    15  	"github.com/go-asm/go/cmd/obj/arm64"
    16  )
    17  
// arm64LS maps the single-letter load/store addressing suffixes to the
// obj.Prog scond bits that encode them: "P" selects post-indexed
// addressing (C_XPOST) and "W" selects pre-indexed addressing with
// writeback (C_XPRE). Used by parseARM64Suffix via parseARMCondition.
var arm64LS = map[string]uint8{
	"P": arm64.C_XPOST,
	"W": arm64.C_XPRE,
}
    22  
    23  var arm64Jump = map[string]bool{
    24  	"B":     true,
    25  	"BL":    true,
    26  	"BEQ":   true,
    27  	"BNE":   true,
    28  	"BCS":   true,
    29  	"BHS":   true,
    30  	"BCC":   true,
    31  	"BLO":   true,
    32  	"BMI":   true,
    33  	"BPL":   true,
    34  	"BVS":   true,
    35  	"BVC":   true,
    36  	"BHI":   true,
    37  	"BLS":   true,
    38  	"BGE":   true,
    39  	"BLT":   true,
    40  	"BGT":   true,
    41  	"BLE":   true,
    42  	"CALL":  true,
    43  	"CBZ":   true,
    44  	"CBZW":  true,
    45  	"CBNZ":  true,
    46  	"CBNZW": true,
    47  	"JMP":   true,
    48  	"TBNZ":  true,
    49  	"TBZ":   true,
    50  
    51  	// ADR isn't really a jump, but it takes a PC or label reference,
    52  	// which needs to patched like a jump.
    53  	"ADR":  true,
    54  	"ADRP": true,
    55  }
    56  
    57  func jumpArm64(word string) bool {
    58  	return arm64Jump[word]
    59  }
    60  
    61  var arm64SpecialOperand map[string]arm64.SpecialOperand
    62  
    63  // GetARM64SpecialOperand returns the internal representation of a special operand.
    64  func GetARM64SpecialOperand(name string) arm64.SpecialOperand {
    65  	if arm64SpecialOperand == nil {
    66  		// Generate the mapping automatically when the first time the function is called.
    67  		arm64SpecialOperand = map[string]arm64.SpecialOperand{}
    68  		for opd := arm64.SPOP_BEGIN; opd < arm64.SPOP_END; opd++ {
    69  			arm64SpecialOperand[opd.String()] = opd
    70  		}
    71  
    72  		// Handle some special cases.
    73  		specialMapping := map[string]arm64.SpecialOperand{
    74  			// The internal representation of CS(CC) and HS(LO) are the same.
    75  			"CS": arm64.SPOP_HS,
    76  			"CC": arm64.SPOP_LO,
    77  		}
    78  		for s, opd := range specialMapping {
    79  			arm64SpecialOperand[s] = opd
    80  		}
    81  	}
    82  	if opd, ok := arm64SpecialOperand[name]; ok {
    83  		return opd
    84  	}
    85  	return arm64.SPOP_END
    86  }
    87  
// IsARM64ADR reports whether the op (as defined by an arm64.A* constant) is
// one of the PC-relative address instructions (ADR, ADRP) that take a label
// reference and require special handling.
func IsARM64ADR(op obj.As) bool {
	switch op {
	case arm64.AADR, arm64.AADRP:
		return true
	}
	return false
}
    97  
    98  // IsARM64CMP reports whether the op (as defined by an arm64.A* constant) is
    99  // one of the comparison instructions that require special handling.
   100  func IsARM64CMP(op obj.As) bool {
   101  	switch op {
   102  	case arm64.ACMN, arm64.ACMP, arm64.ATST,
   103  		arm64.ACMNW, arm64.ACMPW, arm64.ATSTW,
   104  		arm64.AFCMPS, arm64.AFCMPD,
   105  		arm64.AFCMPES, arm64.AFCMPED:
   106  		return true
   107  	}
   108  	return false
   109  }
   110  
   111  // IsARM64STLXR reports whether the op (as defined by an arm64.A*
   112  // constant) is one of the STLXR-like instructions that require special
   113  // handling.
   114  func IsARM64STLXR(op obj.As) bool {
   115  	switch op {
   116  	case arm64.ASTLXRB, arm64.ASTLXRH, arm64.ASTLXRW, arm64.ASTLXR,
   117  		arm64.ASTXRB, arm64.ASTXRH, arm64.ASTXRW, arm64.ASTXR,
   118  		arm64.ASTXP, arm64.ASTXPW, arm64.ASTLXP, arm64.ASTLXPW:
   119  		return true
   120  	}
   121  	// LDADDx/SWPx/CASx atomic instructions
   122  	return arm64.IsAtomicInstruction(op)
   123  }
   124  
   125  // IsARM64TBL reports whether the op (as defined by an arm64.A*
   126  // constant) is one of the TBL-like instructions and one of its
   127  // inputs does not fit into prog.Reg, so require special handling.
   128  func IsARM64TBL(op obj.As) bool {
   129  	switch op {
   130  	case arm64.AVTBL, arm64.AVTBX, arm64.AVMOVQ:
   131  		return true
   132  	}
   133  	return false
   134  }
   135  
   136  // IsARM64CASP reports whether the op (as defined by an arm64.A*
   137  // constant) is one of the CASP-like instructions, and its 2nd
   138  // destination is a register pair that require special handling.
   139  func IsARM64CASP(op obj.As) bool {
   140  	switch op {
   141  	case arm64.ACASPD, arm64.ACASPW:
   142  		return true
   143  	}
   144  	return false
   145  }
   146  
   147  // ARM64Suffix handles the special suffix for the ARM64.
   148  // It returns a boolean to indicate success; failure means
   149  // cond was unrecognized.
   150  func ARM64Suffix(prog *obj.Prog, cond string) bool {
   151  	if cond == "" {
   152  		return true
   153  	}
   154  	bits, ok := parseARM64Suffix(cond)
   155  	if !ok {
   156  		return false
   157  	}
   158  	prog.Scond = bits
   159  	return true
   160  }
   161  
   162  // parseARM64Suffix parses the suffix attached to an ARM64 instruction.
   163  // The input is a single string consisting of period-separated condition
   164  // codes, such as ".P.W". An initial period is ignored.
   165  func parseARM64Suffix(cond string) (uint8, bool) {
   166  	if cond == "" {
   167  		return 0, true
   168  	}
   169  	return parseARMCondition(cond, arm64LS, nil)
   170  }
   171  
   172  func arm64RegisterNumber(name string, n int16) (int16, bool) {
   173  	switch name {
   174  	case "F":
   175  		if 0 <= n && n <= 31 {
   176  			return arm64.REG_F0 + n, true
   177  		}
   178  	case "R":
   179  		if 0 <= n && n <= 30 { // not 31
   180  			return arm64.REG_R0 + n, true
   181  		}
   182  	case "V":
   183  		if 0 <= n && n <= 31 {
   184  			return arm64.REG_V0 + n, true
   185  		}
   186  	}
   187  	return 0, false
   188  }
   189  
   190  // ARM64RegisterShift constructs an ARM64 register with shift operation.
   191  func ARM64RegisterShift(reg, op, count int16) (int64, error) {
   192  	// the base register of shift operations must be general register.
   193  	if reg > arm64.REG_R31 || reg < arm64.REG_R0 {
   194  		return 0, errors.New("invalid register for shift operation")
   195  	}
   196  	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
   197  }
   198  
   199  // ARM64RegisterExtension constructs an ARM64 register with extension or arrangement.
   200  func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
   201  	Rnum := (reg & 31) + int16(num<<5)
   202  	if isAmount {
   203  		if num < 0 || num > 7 {
   204  			return errors.New("index shift amount is out of range")
   205  		}
   206  	}
   207  	if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
   208  		if !isAmount {
   209  			return errors.New("invalid register extension")
   210  		}
   211  		switch ext {
   212  		case "UXTB":
   213  			if a.Type == obj.TYPE_MEM {
   214  				return errors.New("invalid shift for the register offset addressing mode")
   215  			}
   216  			a.Reg = arm64.REG_UXTB + Rnum
   217  		case "UXTH":
   218  			if a.Type == obj.TYPE_MEM {
   219  				return errors.New("invalid shift for the register offset addressing mode")
   220  			}
   221  			a.Reg = arm64.REG_UXTH + Rnum
   222  		case "UXTW":
   223  			// effective address of memory is a base register value and an offset register value.
   224  			if a.Type == obj.TYPE_MEM {
   225  				a.Index = arm64.REG_UXTW + Rnum
   226  			} else {
   227  				a.Reg = arm64.REG_UXTW + Rnum
   228  			}
   229  		case "UXTX":
   230  			if a.Type == obj.TYPE_MEM {
   231  				return errors.New("invalid shift for the register offset addressing mode")
   232  			}
   233  			a.Reg = arm64.REG_UXTX + Rnum
   234  		case "SXTB":
   235  			if a.Type == obj.TYPE_MEM {
   236  				return errors.New("invalid shift for the register offset addressing mode")
   237  			}
   238  			a.Reg = arm64.REG_SXTB + Rnum
   239  		case "SXTH":
   240  			if a.Type == obj.TYPE_MEM {
   241  				return errors.New("invalid shift for the register offset addressing mode")
   242  			}
   243  			a.Reg = arm64.REG_SXTH + Rnum
   244  		case "SXTW":
   245  			if a.Type == obj.TYPE_MEM {
   246  				a.Index = arm64.REG_SXTW + Rnum
   247  			} else {
   248  				a.Reg = arm64.REG_SXTW + Rnum
   249  			}
   250  		case "SXTX":
   251  			if a.Type == obj.TYPE_MEM {
   252  				a.Index = arm64.REG_SXTX + Rnum
   253  			} else {
   254  				a.Reg = arm64.REG_SXTX + Rnum
   255  			}
   256  		case "LSL":
   257  			a.Index = arm64.REG_LSL + Rnum
   258  		default:
   259  			return errors.New("unsupported general register extension type: " + ext)
   260  
   261  		}
   262  	} else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 {
   263  		switch ext {
   264  		case "B8":
   265  			if isIndex {
   266  				return errors.New("invalid register extension")
   267  			}
   268  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
   269  		case "B16":
   270  			if isIndex {
   271  				return errors.New("invalid register extension")
   272  			}
   273  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
   274  		case "H4":
   275  			if isIndex {
   276  				return errors.New("invalid register extension")
   277  			}
   278  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
   279  		case "H8":
   280  			if isIndex {
   281  				return errors.New("invalid register extension")
   282  			}
   283  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
   284  		case "S2":
   285  			if isIndex {
   286  				return errors.New("invalid register extension")
   287  			}
   288  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
   289  		case "S4":
   290  			if isIndex {
   291  				return errors.New("invalid register extension")
   292  			}
   293  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
   294  		case "D1":
   295  			if isIndex {
   296  				return errors.New("invalid register extension")
   297  			}
   298  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
   299  		case "D2":
   300  			if isIndex {
   301  				return errors.New("invalid register extension")
   302  			}
   303  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
   304  		case "Q1":
   305  			if isIndex {
   306  				return errors.New("invalid register extension")
   307  			}
   308  			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
   309  		case "B":
   310  			if !isIndex {
   311  				return nil
   312  			}
   313  			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
   314  			a.Index = num
   315  		case "H":
   316  			if !isIndex {
   317  				return nil
   318  			}
   319  			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
   320  			a.Index = num
   321  		case "S":
   322  			if !isIndex {
   323  				return nil
   324  			}
   325  			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
   326  			a.Index = num
   327  		case "D":
   328  			if !isIndex {
   329  				return nil
   330  			}
   331  			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
   332  			a.Index = num
   333  		default:
   334  			return errors.New("unsupported simd register extension type: " + ext)
   335  		}
   336  	} else {
   337  		return errors.New("invalid register and extension combination")
   338  	}
   339  	return nil
   340  }
   341  
   342  // ARM64RegisterArrangement constructs an ARM64 vector register arrangement.
   343  func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
   344  	var curQ, curSize uint16
   345  	if name[0] != 'V' {
   346  		return 0, errors.New("expect V0 through V31; found: " + name)
   347  	}
   348  	if reg < 0 {
   349  		return 0, errors.New("invalid register number: " + name)
   350  	}
   351  	switch arng {
   352  	case "B8":
   353  		curSize = 0
   354  		curQ = 0
   355  	case "B16":
   356  		curSize = 0
   357  		curQ = 1
   358  	case "H4":
   359  		curSize = 1
   360  		curQ = 0
   361  	case "H8":
   362  		curSize = 1
   363  		curQ = 1
   364  	case "S2":
   365  		curSize = 2
   366  		curQ = 0
   367  	case "S4":
   368  		curSize = 2
   369  		curQ = 1
   370  	case "D1":
   371  		curSize = 3
   372  		curQ = 0
   373  	case "D2":
   374  		curSize = 3
   375  		curQ = 1
   376  	default:
   377  		return 0, errors.New("invalid arrangement in ARM64 register list")
   378  	}
   379  	return (int64(curQ) & 1 << 30) | (int64(curSize&3) << 10), nil
   380  }
   381  
   382  // ARM64RegisterListOffset generates offset encoding according to AArch64 specification.
   383  func ARM64RegisterListOffset(firstReg, regCnt int, arrangement int64) (int64, error) {
   384  	offset := int64(firstReg)
   385  	switch regCnt {
   386  	case 1:
   387  		offset |= 0x7 << 12
   388  	case 2:
   389  		offset |= 0xa << 12
   390  	case 3:
   391  		offset |= 0x6 << 12
   392  	case 4:
   393  		offset |= 0x2 << 12
   394  	default:
   395  		return 0, errors.New("invalid register numbers in ARM64 register list")
   396  	}
   397  	offset |= arrangement
   398  	// arm64 uses the 60th bit to differentiate from other archs
   399  	// For more details, refer to: obj/arm64/list7.go
   400  	offset |= 1 << 60
   401  	return offset, nil
   402  }