// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file encapsulates some of the odd characteristics of the ARM64
// instruction set, to minimize its interaction with the core of the
// assembler.

package arch

import (
	"errors"

	"github.com/bir3/gocompiler/src/cmd/internal/obj"
	"github.com/bir3/gocompiler/src/cmd/internal/obj/arm64"
)

// arm64LS maps the load/store instruction suffixes to their
// pre/post-index addressing-mode bits.
var arm64LS = map[string]uint8{
	"P": arm64.C_XPOST,
	"W": arm64.C_XPRE,
}

// arm64Jump records the instructions whose operands are PC or label
// references that the assembler must patch.
var arm64Jump = map[string]bool{
	"B":     true,
	"BL":    true,
	"BEQ":   true,
	"BNE":   true,
	"BCS":   true,
	"BHS":   true,
	"BCC":   true,
	"BLO":   true,
	"BMI":   true,
	"BPL":   true,
	"BVS":   true,
	"BVC":   true,
	"BHI":   true,
	"BLS":   true,
	"BGE":   true,
	"BLT":   true,
	"BGT":   true,
	"BLE":   true,
	"CALL":  true,
	"CBZ":   true,
	"CBZW":  true,
	"CBNZ":  true,
	"CBNZW": true,
	"JMP":   true,
	"TBNZ":  true,
	"TBZ":   true,

	// ADR isn't really a jump, but it takes a PC or label reference,
	// which needs to be patched like a jump.
	"ADR":  true,
	"ADRP": true,
}

// jumpArm64 reports whether word is an ARM64 jump or jump-like instruction.
func jumpArm64(word string) bool {
	return arm64Jump[word]
}

// arm64SpecialOperand maps special operand names to their internal
// representations; it is built lazily by GetARM64SpecialOperand.
var arm64SpecialOperand map[string]arm64.SpecialOperand

// GetARM64SpecialOperand returns the internal representation of a special
// operand, or arm64.SPOP_END if name is not a special operand.
func GetARM64SpecialOperand(name string) arm64.SpecialOperand {
	if arm64SpecialOperand == nil {
		// Generate the mapping automatically the first time the
		// function is called.
		arm64SpecialOperand = map[string]arm64.SpecialOperand{}
		for opd := arm64.SPOP_BEGIN; opd < arm64.SPOP_END; opd++ {
			arm64SpecialOperand[opd.String()] = opd
		}

		// Handle some special cases.
		specialMapping := map[string]arm64.SpecialOperand{
			// The internal representations of CS and HS (and of CC
			// and LO) are the same.
			"CS": arm64.SPOP_HS,
			"CC": arm64.SPOP_LO,
		}
		for s, opd := range specialMapping {
			arm64SpecialOperand[s] = opd
		}
	}
	if opd, ok := arm64SpecialOperand[name]; ok {
		return opd
	}
	return arm64.SPOP_END
}

// IsARM64ADR reports whether the op (as defined by an arm64.A* constant) is
// one of the ADR-like instructions that take a PC-relative label reference
// and require special handling.
func IsARM64ADR(op obj.As) bool {
	switch op {
	case arm64.AADR, arm64.AADRP:
		return true
	}
	return false
}

// IsARM64CMP reports whether the op (as defined by an arm64.A* constant) is
// one of the comparison instructions that require special handling.
func IsARM64CMP(op obj.As) bool {
	switch op {
	case arm64.ACMN, arm64.ACMP, arm64.ATST,
		arm64.ACMNW, arm64.ACMPW, arm64.ATSTW,
		arm64.AFCMPS, arm64.AFCMPD,
		arm64.AFCMPES, arm64.AFCMPED:
		return true
	}
	return false
}
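
// Illustrative sketch of the lookup's contract (not part of the original
// file): unknown names return the arm64.SPOP_END sentinel rather than a
// zero value, so callers test for it explicitly.
//
//	if opd := GetARM64SpecialOperand("CS"); opd != arm64.SPOP_END {
//		// "CS" aliases "HS", so opd == arm64.SPOP_HS here.
//	}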

// IsARM64STLXR reports whether the op (as defined by an arm64.A*
// constant) is one of the STLXR-like instructions that require special
// handling.
func IsARM64STLXR(op obj.As) bool {
	switch op {
	case arm64.ASTLXRB, arm64.ASTLXRH, arm64.ASTLXRW, arm64.ASTLXR,
		arm64.ASTXRB, arm64.ASTXRH, arm64.ASTXRW, arm64.ASTXR,
		arm64.ASTXP, arm64.ASTXPW, arm64.ASTLXP, arm64.ASTLXPW:
		return true
	}
	// LDADDx/SWPx/CASx atomic instructions.
	return arm64.IsAtomicInstruction(op)
}

// IsARM64TBL reports whether the op (as defined by an arm64.A*
// constant) is one of the TBL-like instructions, whose inputs do not
// fit into prog.Reg and so require special handling.
func IsARM64TBL(op obj.As) bool {
	switch op {
	case arm64.AVTBL, arm64.AVTBX, arm64.AVMOVQ:
		return true
	}
	return false
}

// IsARM64CASP reports whether the op (as defined by an arm64.A*
// constant) is one of the CASP-like instructions, whose second
// destination is a register pair that requires special handling.
func IsARM64CASP(op obj.As) bool {
	switch op {
	case arm64.ACASPD, arm64.ACASPW:
		return true
	}
	return false
}

// ARM64Suffix handles the special suffix for ARM64 instructions.
// It returns a boolean to indicate success; failure means
// cond was unrecognized.
func ARM64Suffix(prog *obj.Prog, cond string) bool {
	if cond == "" {
		return true
	}
	bits, ok := parseARM64Suffix(cond)
	if !ok {
		return false
	}
	prog.Scond = bits
	return true
}

// parseARM64Suffix parses the suffix attached to an ARM64 instruction.
// The input is a single string consisting of period-separated condition
// codes, such as ".P.W". An initial period is ignored.
func parseARM64Suffix(cond string) (uint8, bool) {
	if cond == "" {
		return 0, true
	}
	return parseARMCondition(cond, arm64LS, nil)
}

// arm64RegisterNumber maps a register name prefix ("F", "R", or "V")
// and number to the architecture's internal register representation.
func arm64RegisterNumber(name string, n int16) (int16, bool) {
	switch name {
	case "F":
		if 0 <= n && n <= 31 {
			return arm64.REG_F0 + n, true
		}
	case "R":
		if 0 <= n && n <= 30 { // not 31
			return arm64.REG_R0 + n, true
		}
	case "V":
		if 0 <= n && n <= 31 {
			return arm64.REG_V0 + n, true
		}
	}
	return 0, false
}

// ARM64RegisterShift constructs an ARM64 register with a shift operation.
func ARM64RegisterShift(reg, op, count int16) (int64, error) {
	// The base register of a shift operation must be a general-purpose
	// register.
	if reg > arm64.REG_R31 || reg < arm64.REG_R0 {
		return 0, errors.New("invalid register for shift operation")
	}
	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
}
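
// Worked example for the packing above (illustrative; op is whatever shift
// kind the assembler's parser assigned, treated here as opaque): for
// "R1<<3" the register number reg&31 == 1 lands in bits 16-21 and the
// count in the low 16 bits, so ARM64RegisterShift(arm64.REG_R1, op, 3)
// yields 1<<16 | int64(op)<<22 | 3, which is 0x10003 when op is 0.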

// ARM64RegisterExtension constructs an ARM64 register with extension or
// arrangement.
func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
	// Pack the shift amount or element index into bits 5 and up of the
	// register field.
	Rnum := (reg & 31) + int16(num<<5)
	if isAmount {
		if num < 0 || num > 7 {
			return errors.New("index shift amount is out of range")
		}
	}
	if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
		if !isAmount {
			return errors.New("invalid register extension")
		}
		switch ext {
		case "UXTB":
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTB + Rnum
		case "UXTH":
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTH + Rnum
		case "UXTW":
			// In the memory case the effective address is a base register
			// plus an extended offset register, so the extension goes into
			// the index slot.
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_UXTW + Rnum
			} else {
				a.Reg = arm64.REG_UXTW + Rnum
			}
		case "UXTX":
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTX + Rnum
		case "SXTB":
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_SXTB + Rnum
		case "SXTH":
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_SXTH + Rnum
		case "SXTW":
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_SXTW + Rnum
			} else {
				a.Reg = arm64.REG_SXTW + Rnum
			}
		case "SXTX":
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_SXTX + Rnum
			} else {
				a.Reg = arm64.REG_SXTX + Rnum
			}
		case "LSL":
			a.Index = arm64.REG_LSL + Rnum
		default:
			return errors.New("unsupported general register extension type: " + ext)
		}
	} else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 {
		switch ext {
		case "B8":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
		case "B16":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
		case "H4":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
		case "H8":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
		case "S2":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
		case "S4":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
		case "D1":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
		case "D2":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
		case "Q1":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
		case "B":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
			a.Index = num
		case "H":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
			a.Index = num
		case "S":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
			a.Index = num
		case "D":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
			a.Index = num
		default:
			return errors.New("unsupported simd register extension type: " + ext)
		}
	} else {
		return errors.New("invalid register and extension combination")
	}
	return nil
}
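
// Worked examples for the packing above (illustrative, derived from the
// code): Rnum folds the shift amount or element index into bits 5 and up
// of the register field.
//
//   - "R2.UXTW<<2" on a non-memory operand: Rnum = (2&31) + 2<<5 = 0x42,
//     so a.Reg becomes arm64.REG_UXTW + 0x42.
//   - "V5.S[2]" as a vector element (isIndex): a.Reg becomes
//     arm64.REG_ELEM + 5 + (arm64.ARNG_S&15)<<5 and a.Index is set to 2.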

// ARM64RegisterArrangement constructs an ARM64 vector register arrangement.
func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
	// curQ is the Q bit (0 = 64-bit, 1 = 128-bit vector) and curSize
	// the element-size field (0 = B, 1 = H, 2 = S, 3 = D).
	var curQ, curSize uint16
	if name[0] != 'V' {
		return 0, errors.New("expect V0 through V31; found: " + name)
	}
	if reg < 0 {
		return 0, errors.New("invalid register number: " + name)
	}
	switch arng {
	case "B8":
		curSize = 0
		curQ = 0
	case "B16":
		curSize = 0
		curQ = 1
	case "H4":
		curSize = 1
		curQ = 0
	case "H8":
		curSize = 1
		curQ = 1
	case "S2":
		curSize = 2
		curQ = 0
	case "S4":
		curSize = 2
		curQ = 1
	case "D1":
		curSize = 3
		curQ = 0
	case "D2":
		curSize = 3
		curQ = 1
	default:
		return 0, errors.New("invalid arrangement in ARM64 register list")
	}
	return int64(curQ&1)<<30 | int64(curSize&3)<<10, nil
}

// ARM64RegisterListOffset generates the offset encoding according to the
// AArch64 specification.
func ARM64RegisterListOffset(firstReg, regCnt int, arrangement int64) (int64, error) {
	offset := int64(firstReg)
	switch regCnt {
	case 1:
		offset |= 0x7 << 12
	case 2:
		offset |= 0xa << 12
	case 3:
		offset |= 0x6 << 12
	case 4:
		offset |= 0x2 << 12
	default:
		return 0, errors.New("invalid register numbers in ARM64 register list")
	}
	offset |= arrangement
	// arm64 uses bit 60 to differentiate this encoding from other arches'.
	// For more details, refer to: obj/arm64/list7.go.
	offset |= 1 << 60
	return offset, nil
}
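
// Worked example for the encoding above (illustrative): a two-register
// list starting at V1 gives ARM64RegisterListOffset(1, 2, arrangement) ==
// 1 | 0xa<<12 | arrangement | 1<<60. The per-count constants (0x7, 0xa,
// 0x6, 0x2) match the opcode field of the AArch64 "load/store multiple
// structures" instruction class for 1-4 registers.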