github.com/bir3/gocompiler@v0.9.2202/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from _gen/ARM64.rules using 'go generate'; DO NOT EDIT. 2 3 package ssa 4 5 import "github.com/bir3/gocompiler/src/cmd/compile/internal/types" 6 7 func rewriteValueARM64(v *Value) bool { 8 switch v.Op { 9 case OpARM64ADCSflags: 10 return rewriteValueARM64_OpARM64ADCSflags(v) 11 case OpARM64ADD: 12 return rewriteValueARM64_OpARM64ADD(v) 13 case OpARM64ADDSflags: 14 return rewriteValueARM64_OpARM64ADDSflags(v) 15 case OpARM64ADDconst: 16 return rewriteValueARM64_OpARM64ADDconst(v) 17 case OpARM64ADDshiftLL: 18 return rewriteValueARM64_OpARM64ADDshiftLL(v) 19 case OpARM64ADDshiftRA: 20 return rewriteValueARM64_OpARM64ADDshiftRA(v) 21 case OpARM64ADDshiftRL: 22 return rewriteValueARM64_OpARM64ADDshiftRL(v) 23 case OpARM64AND: 24 return rewriteValueARM64_OpARM64AND(v) 25 case OpARM64ANDconst: 26 return rewriteValueARM64_OpARM64ANDconst(v) 27 case OpARM64ANDshiftLL: 28 return rewriteValueARM64_OpARM64ANDshiftLL(v) 29 case OpARM64ANDshiftRA: 30 return rewriteValueARM64_OpARM64ANDshiftRA(v) 31 case OpARM64ANDshiftRL: 32 return rewriteValueARM64_OpARM64ANDshiftRL(v) 33 case OpARM64ANDshiftRO: 34 return rewriteValueARM64_OpARM64ANDshiftRO(v) 35 case OpARM64BIC: 36 return rewriteValueARM64_OpARM64BIC(v) 37 case OpARM64BICshiftLL: 38 return rewriteValueARM64_OpARM64BICshiftLL(v) 39 case OpARM64BICshiftRA: 40 return rewriteValueARM64_OpARM64BICshiftRA(v) 41 case OpARM64BICshiftRL: 42 return rewriteValueARM64_OpARM64BICshiftRL(v) 43 case OpARM64BICshiftRO: 44 return rewriteValueARM64_OpARM64BICshiftRO(v) 45 case OpARM64CMN: 46 return rewriteValueARM64_OpARM64CMN(v) 47 case OpARM64CMNW: 48 return rewriteValueARM64_OpARM64CMNW(v) 49 case OpARM64CMNWconst: 50 return rewriteValueARM64_OpARM64CMNWconst(v) 51 case OpARM64CMNconst: 52 return rewriteValueARM64_OpARM64CMNconst(v) 53 case OpARM64CMNshiftLL: 54 return rewriteValueARM64_OpARM64CMNshiftLL(v) 55 case 
OpARM64CMNshiftRA: 56 return rewriteValueARM64_OpARM64CMNshiftRA(v) 57 case OpARM64CMNshiftRL: 58 return rewriteValueARM64_OpARM64CMNshiftRL(v) 59 case OpARM64CMP: 60 return rewriteValueARM64_OpARM64CMP(v) 61 case OpARM64CMPW: 62 return rewriteValueARM64_OpARM64CMPW(v) 63 case OpARM64CMPWconst: 64 return rewriteValueARM64_OpARM64CMPWconst(v) 65 case OpARM64CMPconst: 66 return rewriteValueARM64_OpARM64CMPconst(v) 67 case OpARM64CMPshiftLL: 68 return rewriteValueARM64_OpARM64CMPshiftLL(v) 69 case OpARM64CMPshiftRA: 70 return rewriteValueARM64_OpARM64CMPshiftRA(v) 71 case OpARM64CMPshiftRL: 72 return rewriteValueARM64_OpARM64CMPshiftRL(v) 73 case OpARM64CSEL: 74 return rewriteValueARM64_OpARM64CSEL(v) 75 case OpARM64CSEL0: 76 return rewriteValueARM64_OpARM64CSEL0(v) 77 case OpARM64CSETM: 78 return rewriteValueARM64_OpARM64CSETM(v) 79 case OpARM64CSINC: 80 return rewriteValueARM64_OpARM64CSINC(v) 81 case OpARM64CSINV: 82 return rewriteValueARM64_OpARM64CSINV(v) 83 case OpARM64CSNEG: 84 return rewriteValueARM64_OpARM64CSNEG(v) 85 case OpARM64DIV: 86 return rewriteValueARM64_OpARM64DIV(v) 87 case OpARM64DIVW: 88 return rewriteValueARM64_OpARM64DIVW(v) 89 case OpARM64EON: 90 return rewriteValueARM64_OpARM64EON(v) 91 case OpARM64EONshiftLL: 92 return rewriteValueARM64_OpARM64EONshiftLL(v) 93 case OpARM64EONshiftRA: 94 return rewriteValueARM64_OpARM64EONshiftRA(v) 95 case OpARM64EONshiftRL: 96 return rewriteValueARM64_OpARM64EONshiftRL(v) 97 case OpARM64EONshiftRO: 98 return rewriteValueARM64_OpARM64EONshiftRO(v) 99 case OpARM64Equal: 100 return rewriteValueARM64_OpARM64Equal(v) 101 case OpARM64FADDD: 102 return rewriteValueARM64_OpARM64FADDD(v) 103 case OpARM64FADDS: 104 return rewriteValueARM64_OpARM64FADDS(v) 105 case OpARM64FCMPD: 106 return rewriteValueARM64_OpARM64FCMPD(v) 107 case OpARM64FCMPS: 108 return rewriteValueARM64_OpARM64FCMPS(v) 109 case OpARM64FMOVDfpgp: 110 return rewriteValueARM64_OpARM64FMOVDfpgp(v) 111 case OpARM64FMOVDgpfp: 112 return 
rewriteValueARM64_OpARM64FMOVDgpfp(v) 113 case OpARM64FMOVDload: 114 return rewriteValueARM64_OpARM64FMOVDload(v) 115 case OpARM64FMOVDloadidx: 116 return rewriteValueARM64_OpARM64FMOVDloadidx(v) 117 case OpARM64FMOVDloadidx8: 118 return rewriteValueARM64_OpARM64FMOVDloadidx8(v) 119 case OpARM64FMOVDstore: 120 return rewriteValueARM64_OpARM64FMOVDstore(v) 121 case OpARM64FMOVDstoreidx: 122 return rewriteValueARM64_OpARM64FMOVDstoreidx(v) 123 case OpARM64FMOVDstoreidx8: 124 return rewriteValueARM64_OpARM64FMOVDstoreidx8(v) 125 case OpARM64FMOVSload: 126 return rewriteValueARM64_OpARM64FMOVSload(v) 127 case OpARM64FMOVSloadidx: 128 return rewriteValueARM64_OpARM64FMOVSloadidx(v) 129 case OpARM64FMOVSloadidx4: 130 return rewriteValueARM64_OpARM64FMOVSloadidx4(v) 131 case OpARM64FMOVSstore: 132 return rewriteValueARM64_OpARM64FMOVSstore(v) 133 case OpARM64FMOVSstoreidx: 134 return rewriteValueARM64_OpARM64FMOVSstoreidx(v) 135 case OpARM64FMOVSstoreidx4: 136 return rewriteValueARM64_OpARM64FMOVSstoreidx4(v) 137 case OpARM64FMULD: 138 return rewriteValueARM64_OpARM64FMULD(v) 139 case OpARM64FMULS: 140 return rewriteValueARM64_OpARM64FMULS(v) 141 case OpARM64FNEGD: 142 return rewriteValueARM64_OpARM64FNEGD(v) 143 case OpARM64FNEGS: 144 return rewriteValueARM64_OpARM64FNEGS(v) 145 case OpARM64FNMULD: 146 return rewriteValueARM64_OpARM64FNMULD(v) 147 case OpARM64FNMULS: 148 return rewriteValueARM64_OpARM64FNMULS(v) 149 case OpARM64FSUBD: 150 return rewriteValueARM64_OpARM64FSUBD(v) 151 case OpARM64FSUBS: 152 return rewriteValueARM64_OpARM64FSUBS(v) 153 case OpARM64GreaterEqual: 154 return rewriteValueARM64_OpARM64GreaterEqual(v) 155 case OpARM64GreaterEqualF: 156 return rewriteValueARM64_OpARM64GreaterEqualF(v) 157 case OpARM64GreaterEqualNoov: 158 return rewriteValueARM64_OpARM64GreaterEqualNoov(v) 159 case OpARM64GreaterEqualU: 160 return rewriteValueARM64_OpARM64GreaterEqualU(v) 161 case OpARM64GreaterThan: 162 return rewriteValueARM64_OpARM64GreaterThan(v) 163 case 
OpARM64GreaterThanF: 164 return rewriteValueARM64_OpARM64GreaterThanF(v) 165 case OpARM64GreaterThanU: 166 return rewriteValueARM64_OpARM64GreaterThanU(v) 167 case OpARM64LDP: 168 return rewriteValueARM64_OpARM64LDP(v) 169 case OpARM64LessEqual: 170 return rewriteValueARM64_OpARM64LessEqual(v) 171 case OpARM64LessEqualF: 172 return rewriteValueARM64_OpARM64LessEqualF(v) 173 case OpARM64LessEqualU: 174 return rewriteValueARM64_OpARM64LessEqualU(v) 175 case OpARM64LessThan: 176 return rewriteValueARM64_OpARM64LessThan(v) 177 case OpARM64LessThanF: 178 return rewriteValueARM64_OpARM64LessThanF(v) 179 case OpARM64LessThanNoov: 180 return rewriteValueARM64_OpARM64LessThanNoov(v) 181 case OpARM64LessThanU: 182 return rewriteValueARM64_OpARM64LessThanU(v) 183 case OpARM64MADD: 184 return rewriteValueARM64_OpARM64MADD(v) 185 case OpARM64MADDW: 186 return rewriteValueARM64_OpARM64MADDW(v) 187 case OpARM64MNEG: 188 return rewriteValueARM64_OpARM64MNEG(v) 189 case OpARM64MNEGW: 190 return rewriteValueARM64_OpARM64MNEGW(v) 191 case OpARM64MOD: 192 return rewriteValueARM64_OpARM64MOD(v) 193 case OpARM64MODW: 194 return rewriteValueARM64_OpARM64MODW(v) 195 case OpARM64MOVBUload: 196 return rewriteValueARM64_OpARM64MOVBUload(v) 197 case OpARM64MOVBUloadidx: 198 return rewriteValueARM64_OpARM64MOVBUloadidx(v) 199 case OpARM64MOVBUreg: 200 return rewriteValueARM64_OpARM64MOVBUreg(v) 201 case OpARM64MOVBload: 202 return rewriteValueARM64_OpARM64MOVBload(v) 203 case OpARM64MOVBloadidx: 204 return rewriteValueARM64_OpARM64MOVBloadidx(v) 205 case OpARM64MOVBreg: 206 return rewriteValueARM64_OpARM64MOVBreg(v) 207 case OpARM64MOVBstore: 208 return rewriteValueARM64_OpARM64MOVBstore(v) 209 case OpARM64MOVBstoreidx: 210 return rewriteValueARM64_OpARM64MOVBstoreidx(v) 211 case OpARM64MOVBstorezero: 212 return rewriteValueARM64_OpARM64MOVBstorezero(v) 213 case OpARM64MOVBstorezeroidx: 214 return rewriteValueARM64_OpARM64MOVBstorezeroidx(v) 215 case OpARM64MOVDload: 216 return 
rewriteValueARM64_OpARM64MOVDload(v) 217 case OpARM64MOVDloadidx: 218 return rewriteValueARM64_OpARM64MOVDloadidx(v) 219 case OpARM64MOVDloadidx8: 220 return rewriteValueARM64_OpARM64MOVDloadidx8(v) 221 case OpARM64MOVDnop: 222 return rewriteValueARM64_OpARM64MOVDnop(v) 223 case OpARM64MOVDreg: 224 return rewriteValueARM64_OpARM64MOVDreg(v) 225 case OpARM64MOVDstore: 226 return rewriteValueARM64_OpARM64MOVDstore(v) 227 case OpARM64MOVDstoreidx: 228 return rewriteValueARM64_OpARM64MOVDstoreidx(v) 229 case OpARM64MOVDstoreidx8: 230 return rewriteValueARM64_OpARM64MOVDstoreidx8(v) 231 case OpARM64MOVDstorezero: 232 return rewriteValueARM64_OpARM64MOVDstorezero(v) 233 case OpARM64MOVDstorezeroidx: 234 return rewriteValueARM64_OpARM64MOVDstorezeroidx(v) 235 case OpARM64MOVDstorezeroidx8: 236 return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v) 237 case OpARM64MOVHUload: 238 return rewriteValueARM64_OpARM64MOVHUload(v) 239 case OpARM64MOVHUloadidx: 240 return rewriteValueARM64_OpARM64MOVHUloadidx(v) 241 case OpARM64MOVHUloadidx2: 242 return rewriteValueARM64_OpARM64MOVHUloadidx2(v) 243 case OpARM64MOVHUreg: 244 return rewriteValueARM64_OpARM64MOVHUreg(v) 245 case OpARM64MOVHload: 246 return rewriteValueARM64_OpARM64MOVHload(v) 247 case OpARM64MOVHloadidx: 248 return rewriteValueARM64_OpARM64MOVHloadidx(v) 249 case OpARM64MOVHloadidx2: 250 return rewriteValueARM64_OpARM64MOVHloadidx2(v) 251 case OpARM64MOVHreg: 252 return rewriteValueARM64_OpARM64MOVHreg(v) 253 case OpARM64MOVHstore: 254 return rewriteValueARM64_OpARM64MOVHstore(v) 255 case OpARM64MOVHstoreidx: 256 return rewriteValueARM64_OpARM64MOVHstoreidx(v) 257 case OpARM64MOVHstoreidx2: 258 return rewriteValueARM64_OpARM64MOVHstoreidx2(v) 259 case OpARM64MOVHstorezero: 260 return rewriteValueARM64_OpARM64MOVHstorezero(v) 261 case OpARM64MOVHstorezeroidx: 262 return rewriteValueARM64_OpARM64MOVHstorezeroidx(v) 263 case OpARM64MOVHstorezeroidx2: 264 return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v) 265 case 
OpARM64MOVQstorezero: 266 return rewriteValueARM64_OpARM64MOVQstorezero(v) 267 case OpARM64MOVWUload: 268 return rewriteValueARM64_OpARM64MOVWUload(v) 269 case OpARM64MOVWUloadidx: 270 return rewriteValueARM64_OpARM64MOVWUloadidx(v) 271 case OpARM64MOVWUloadidx4: 272 return rewriteValueARM64_OpARM64MOVWUloadidx4(v) 273 case OpARM64MOVWUreg: 274 return rewriteValueARM64_OpARM64MOVWUreg(v) 275 case OpARM64MOVWload: 276 return rewriteValueARM64_OpARM64MOVWload(v) 277 case OpARM64MOVWloadidx: 278 return rewriteValueARM64_OpARM64MOVWloadidx(v) 279 case OpARM64MOVWloadidx4: 280 return rewriteValueARM64_OpARM64MOVWloadidx4(v) 281 case OpARM64MOVWreg: 282 return rewriteValueARM64_OpARM64MOVWreg(v) 283 case OpARM64MOVWstore: 284 return rewriteValueARM64_OpARM64MOVWstore(v) 285 case OpARM64MOVWstoreidx: 286 return rewriteValueARM64_OpARM64MOVWstoreidx(v) 287 case OpARM64MOVWstoreidx4: 288 return rewriteValueARM64_OpARM64MOVWstoreidx4(v) 289 case OpARM64MOVWstorezero: 290 return rewriteValueARM64_OpARM64MOVWstorezero(v) 291 case OpARM64MOVWstorezeroidx: 292 return rewriteValueARM64_OpARM64MOVWstorezeroidx(v) 293 case OpARM64MOVWstorezeroidx4: 294 return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v) 295 case OpARM64MSUB: 296 return rewriteValueARM64_OpARM64MSUB(v) 297 case OpARM64MSUBW: 298 return rewriteValueARM64_OpARM64MSUBW(v) 299 case OpARM64MUL: 300 return rewriteValueARM64_OpARM64MUL(v) 301 case OpARM64MULW: 302 return rewriteValueARM64_OpARM64MULW(v) 303 case OpARM64MVN: 304 return rewriteValueARM64_OpARM64MVN(v) 305 case OpARM64MVNshiftLL: 306 return rewriteValueARM64_OpARM64MVNshiftLL(v) 307 case OpARM64MVNshiftRA: 308 return rewriteValueARM64_OpARM64MVNshiftRA(v) 309 case OpARM64MVNshiftRL: 310 return rewriteValueARM64_OpARM64MVNshiftRL(v) 311 case OpARM64MVNshiftRO: 312 return rewriteValueARM64_OpARM64MVNshiftRO(v) 313 case OpARM64NEG: 314 return rewriteValueARM64_OpARM64NEG(v) 315 case OpARM64NEGshiftLL: 316 return rewriteValueARM64_OpARM64NEGshiftLL(v) 317 case 
OpARM64NEGshiftRA: 318 return rewriteValueARM64_OpARM64NEGshiftRA(v) 319 case OpARM64NEGshiftRL: 320 return rewriteValueARM64_OpARM64NEGshiftRL(v) 321 case OpARM64NotEqual: 322 return rewriteValueARM64_OpARM64NotEqual(v) 323 case OpARM64OR: 324 return rewriteValueARM64_OpARM64OR(v) 325 case OpARM64ORN: 326 return rewriteValueARM64_OpARM64ORN(v) 327 case OpARM64ORNshiftLL: 328 return rewriteValueARM64_OpARM64ORNshiftLL(v) 329 case OpARM64ORNshiftRA: 330 return rewriteValueARM64_OpARM64ORNshiftRA(v) 331 case OpARM64ORNshiftRL: 332 return rewriteValueARM64_OpARM64ORNshiftRL(v) 333 case OpARM64ORNshiftRO: 334 return rewriteValueARM64_OpARM64ORNshiftRO(v) 335 case OpARM64ORconst: 336 return rewriteValueARM64_OpARM64ORconst(v) 337 case OpARM64ORshiftLL: 338 return rewriteValueARM64_OpARM64ORshiftLL(v) 339 case OpARM64ORshiftRA: 340 return rewriteValueARM64_OpARM64ORshiftRA(v) 341 case OpARM64ORshiftRL: 342 return rewriteValueARM64_OpARM64ORshiftRL(v) 343 case OpARM64ORshiftRO: 344 return rewriteValueARM64_OpARM64ORshiftRO(v) 345 case OpARM64REV: 346 return rewriteValueARM64_OpARM64REV(v) 347 case OpARM64REVW: 348 return rewriteValueARM64_OpARM64REVW(v) 349 case OpARM64ROR: 350 return rewriteValueARM64_OpARM64ROR(v) 351 case OpARM64RORW: 352 return rewriteValueARM64_OpARM64RORW(v) 353 case OpARM64SBCSflags: 354 return rewriteValueARM64_OpARM64SBCSflags(v) 355 case OpARM64SLL: 356 return rewriteValueARM64_OpARM64SLL(v) 357 case OpARM64SLLconst: 358 return rewriteValueARM64_OpARM64SLLconst(v) 359 case OpARM64SRA: 360 return rewriteValueARM64_OpARM64SRA(v) 361 case OpARM64SRAconst: 362 return rewriteValueARM64_OpARM64SRAconst(v) 363 case OpARM64SRL: 364 return rewriteValueARM64_OpARM64SRL(v) 365 case OpARM64SRLconst: 366 return rewriteValueARM64_OpARM64SRLconst(v) 367 case OpARM64STP: 368 return rewriteValueARM64_OpARM64STP(v) 369 case OpARM64SUB: 370 return rewriteValueARM64_OpARM64SUB(v) 371 case OpARM64SUBconst: 372 return rewriteValueARM64_OpARM64SUBconst(v) 373 case 
OpARM64SUBshiftLL: 374 return rewriteValueARM64_OpARM64SUBshiftLL(v) 375 case OpARM64SUBshiftRA: 376 return rewriteValueARM64_OpARM64SUBshiftRA(v) 377 case OpARM64SUBshiftRL: 378 return rewriteValueARM64_OpARM64SUBshiftRL(v) 379 case OpARM64TST: 380 return rewriteValueARM64_OpARM64TST(v) 381 case OpARM64TSTW: 382 return rewriteValueARM64_OpARM64TSTW(v) 383 case OpARM64TSTWconst: 384 return rewriteValueARM64_OpARM64TSTWconst(v) 385 case OpARM64TSTconst: 386 return rewriteValueARM64_OpARM64TSTconst(v) 387 case OpARM64TSTshiftLL: 388 return rewriteValueARM64_OpARM64TSTshiftLL(v) 389 case OpARM64TSTshiftRA: 390 return rewriteValueARM64_OpARM64TSTshiftRA(v) 391 case OpARM64TSTshiftRL: 392 return rewriteValueARM64_OpARM64TSTshiftRL(v) 393 case OpARM64TSTshiftRO: 394 return rewriteValueARM64_OpARM64TSTshiftRO(v) 395 case OpARM64UBFIZ: 396 return rewriteValueARM64_OpARM64UBFIZ(v) 397 case OpARM64UBFX: 398 return rewriteValueARM64_OpARM64UBFX(v) 399 case OpARM64UDIV: 400 return rewriteValueARM64_OpARM64UDIV(v) 401 case OpARM64UDIVW: 402 return rewriteValueARM64_OpARM64UDIVW(v) 403 case OpARM64UMOD: 404 return rewriteValueARM64_OpARM64UMOD(v) 405 case OpARM64UMODW: 406 return rewriteValueARM64_OpARM64UMODW(v) 407 case OpARM64XOR: 408 return rewriteValueARM64_OpARM64XOR(v) 409 case OpARM64XORconst: 410 return rewriteValueARM64_OpARM64XORconst(v) 411 case OpARM64XORshiftLL: 412 return rewriteValueARM64_OpARM64XORshiftLL(v) 413 case OpARM64XORshiftRA: 414 return rewriteValueARM64_OpARM64XORshiftRA(v) 415 case OpARM64XORshiftRL: 416 return rewriteValueARM64_OpARM64XORshiftRL(v) 417 case OpARM64XORshiftRO: 418 return rewriteValueARM64_OpARM64XORshiftRO(v) 419 case OpAbs: 420 v.Op = OpARM64FABSD 421 return true 422 case OpAdd16: 423 v.Op = OpARM64ADD 424 return true 425 case OpAdd32: 426 v.Op = OpARM64ADD 427 return true 428 case OpAdd32F: 429 v.Op = OpARM64FADDS 430 return true 431 case OpAdd64: 432 v.Op = OpARM64ADD 433 return true 434 case OpAdd64F: 435 v.Op = OpARM64FADDD 436 
return true 437 case OpAdd8: 438 v.Op = OpARM64ADD 439 return true 440 case OpAddPtr: 441 v.Op = OpARM64ADD 442 return true 443 case OpAddr: 444 return rewriteValueARM64_OpAddr(v) 445 case OpAnd16: 446 v.Op = OpARM64AND 447 return true 448 case OpAnd32: 449 v.Op = OpARM64AND 450 return true 451 case OpAnd64: 452 v.Op = OpARM64AND 453 return true 454 case OpAnd8: 455 v.Op = OpARM64AND 456 return true 457 case OpAndB: 458 v.Op = OpARM64AND 459 return true 460 case OpAtomicAdd32: 461 v.Op = OpARM64LoweredAtomicAdd32 462 return true 463 case OpAtomicAdd32Variant: 464 v.Op = OpARM64LoweredAtomicAdd32Variant 465 return true 466 case OpAtomicAdd64: 467 v.Op = OpARM64LoweredAtomicAdd64 468 return true 469 case OpAtomicAdd64Variant: 470 v.Op = OpARM64LoweredAtomicAdd64Variant 471 return true 472 case OpAtomicAnd32: 473 return rewriteValueARM64_OpAtomicAnd32(v) 474 case OpAtomicAnd32Variant: 475 return rewriteValueARM64_OpAtomicAnd32Variant(v) 476 case OpAtomicAnd8: 477 return rewriteValueARM64_OpAtomicAnd8(v) 478 case OpAtomicAnd8Variant: 479 return rewriteValueARM64_OpAtomicAnd8Variant(v) 480 case OpAtomicCompareAndSwap32: 481 v.Op = OpARM64LoweredAtomicCas32 482 return true 483 case OpAtomicCompareAndSwap32Variant: 484 v.Op = OpARM64LoweredAtomicCas32Variant 485 return true 486 case OpAtomicCompareAndSwap64: 487 v.Op = OpARM64LoweredAtomicCas64 488 return true 489 case OpAtomicCompareAndSwap64Variant: 490 v.Op = OpARM64LoweredAtomicCas64Variant 491 return true 492 case OpAtomicExchange32: 493 v.Op = OpARM64LoweredAtomicExchange32 494 return true 495 case OpAtomicExchange32Variant: 496 v.Op = OpARM64LoweredAtomicExchange32Variant 497 return true 498 case OpAtomicExchange64: 499 v.Op = OpARM64LoweredAtomicExchange64 500 return true 501 case OpAtomicExchange64Variant: 502 v.Op = OpARM64LoweredAtomicExchange64Variant 503 return true 504 case OpAtomicLoad32: 505 v.Op = OpARM64LDARW 506 return true 507 case OpAtomicLoad64: 508 v.Op = OpARM64LDAR 509 return true 510 case 
OpAtomicLoad8: 511 v.Op = OpARM64LDARB 512 return true 513 case OpAtomicLoadPtr: 514 v.Op = OpARM64LDAR 515 return true 516 case OpAtomicOr32: 517 return rewriteValueARM64_OpAtomicOr32(v) 518 case OpAtomicOr32Variant: 519 return rewriteValueARM64_OpAtomicOr32Variant(v) 520 case OpAtomicOr8: 521 return rewriteValueARM64_OpAtomicOr8(v) 522 case OpAtomicOr8Variant: 523 return rewriteValueARM64_OpAtomicOr8Variant(v) 524 case OpAtomicStore32: 525 v.Op = OpARM64STLRW 526 return true 527 case OpAtomicStore64: 528 v.Op = OpARM64STLR 529 return true 530 case OpAtomicStore8: 531 v.Op = OpARM64STLRB 532 return true 533 case OpAtomicStorePtrNoWB: 534 v.Op = OpARM64STLR 535 return true 536 case OpAvg64u: 537 return rewriteValueARM64_OpAvg64u(v) 538 case OpBitLen32: 539 return rewriteValueARM64_OpBitLen32(v) 540 case OpBitLen64: 541 return rewriteValueARM64_OpBitLen64(v) 542 case OpBitRev16: 543 return rewriteValueARM64_OpBitRev16(v) 544 case OpBitRev32: 545 v.Op = OpARM64RBITW 546 return true 547 case OpBitRev64: 548 v.Op = OpARM64RBIT 549 return true 550 case OpBitRev8: 551 return rewriteValueARM64_OpBitRev8(v) 552 case OpBswap16: 553 v.Op = OpARM64REV16W 554 return true 555 case OpBswap32: 556 v.Op = OpARM64REVW 557 return true 558 case OpBswap64: 559 v.Op = OpARM64REV 560 return true 561 case OpCeil: 562 v.Op = OpARM64FRINTPD 563 return true 564 case OpClosureCall: 565 v.Op = OpARM64CALLclosure 566 return true 567 case OpCom16: 568 v.Op = OpARM64MVN 569 return true 570 case OpCom32: 571 v.Op = OpARM64MVN 572 return true 573 case OpCom64: 574 v.Op = OpARM64MVN 575 return true 576 case OpCom8: 577 v.Op = OpARM64MVN 578 return true 579 case OpCondSelect: 580 return rewriteValueARM64_OpCondSelect(v) 581 case OpConst16: 582 return rewriteValueARM64_OpConst16(v) 583 case OpConst32: 584 return rewriteValueARM64_OpConst32(v) 585 case OpConst32F: 586 return rewriteValueARM64_OpConst32F(v) 587 case OpConst64: 588 return rewriteValueARM64_OpConst64(v) 589 case OpConst64F: 590 return 
rewriteValueARM64_OpConst64F(v) 591 case OpConst8: 592 return rewriteValueARM64_OpConst8(v) 593 case OpConstBool: 594 return rewriteValueARM64_OpConstBool(v) 595 case OpConstNil: 596 return rewriteValueARM64_OpConstNil(v) 597 case OpCtz16: 598 return rewriteValueARM64_OpCtz16(v) 599 case OpCtz16NonZero: 600 v.Op = OpCtz32 601 return true 602 case OpCtz32: 603 return rewriteValueARM64_OpCtz32(v) 604 case OpCtz32NonZero: 605 v.Op = OpCtz32 606 return true 607 case OpCtz64: 608 return rewriteValueARM64_OpCtz64(v) 609 case OpCtz64NonZero: 610 v.Op = OpCtz64 611 return true 612 case OpCtz8: 613 return rewriteValueARM64_OpCtz8(v) 614 case OpCtz8NonZero: 615 v.Op = OpCtz32 616 return true 617 case OpCvt32Fto32: 618 v.Op = OpARM64FCVTZSSW 619 return true 620 case OpCvt32Fto32U: 621 v.Op = OpARM64FCVTZUSW 622 return true 623 case OpCvt32Fto64: 624 v.Op = OpARM64FCVTZSS 625 return true 626 case OpCvt32Fto64F: 627 v.Op = OpARM64FCVTSD 628 return true 629 case OpCvt32Fto64U: 630 v.Op = OpARM64FCVTZUS 631 return true 632 case OpCvt32Uto32F: 633 v.Op = OpARM64UCVTFWS 634 return true 635 case OpCvt32Uto64F: 636 v.Op = OpARM64UCVTFWD 637 return true 638 case OpCvt32to32F: 639 v.Op = OpARM64SCVTFWS 640 return true 641 case OpCvt32to64F: 642 v.Op = OpARM64SCVTFWD 643 return true 644 case OpCvt64Fto32: 645 v.Op = OpARM64FCVTZSDW 646 return true 647 case OpCvt64Fto32F: 648 v.Op = OpARM64FCVTDS 649 return true 650 case OpCvt64Fto32U: 651 v.Op = OpARM64FCVTZUDW 652 return true 653 case OpCvt64Fto64: 654 v.Op = OpARM64FCVTZSD 655 return true 656 case OpCvt64Fto64U: 657 v.Op = OpARM64FCVTZUD 658 return true 659 case OpCvt64Uto32F: 660 v.Op = OpARM64UCVTFS 661 return true 662 case OpCvt64Uto64F: 663 v.Op = OpARM64UCVTFD 664 return true 665 case OpCvt64to32F: 666 v.Op = OpARM64SCVTFS 667 return true 668 case OpCvt64to64F: 669 v.Op = OpARM64SCVTFD 670 return true 671 case OpCvtBoolToUint8: 672 v.Op = OpCopy 673 return true 674 case OpDiv16: 675 return rewriteValueARM64_OpDiv16(v) 676 case 
OpDiv16u: 677 return rewriteValueARM64_OpDiv16u(v) 678 case OpDiv32: 679 return rewriteValueARM64_OpDiv32(v) 680 case OpDiv32F: 681 v.Op = OpARM64FDIVS 682 return true 683 case OpDiv32u: 684 v.Op = OpARM64UDIVW 685 return true 686 case OpDiv64: 687 return rewriteValueARM64_OpDiv64(v) 688 case OpDiv64F: 689 v.Op = OpARM64FDIVD 690 return true 691 case OpDiv64u: 692 v.Op = OpARM64UDIV 693 return true 694 case OpDiv8: 695 return rewriteValueARM64_OpDiv8(v) 696 case OpDiv8u: 697 return rewriteValueARM64_OpDiv8u(v) 698 case OpEq16: 699 return rewriteValueARM64_OpEq16(v) 700 case OpEq32: 701 return rewriteValueARM64_OpEq32(v) 702 case OpEq32F: 703 return rewriteValueARM64_OpEq32F(v) 704 case OpEq64: 705 return rewriteValueARM64_OpEq64(v) 706 case OpEq64F: 707 return rewriteValueARM64_OpEq64F(v) 708 case OpEq8: 709 return rewriteValueARM64_OpEq8(v) 710 case OpEqB: 711 return rewriteValueARM64_OpEqB(v) 712 case OpEqPtr: 713 return rewriteValueARM64_OpEqPtr(v) 714 case OpFMA: 715 return rewriteValueARM64_OpFMA(v) 716 case OpFloor: 717 v.Op = OpARM64FRINTMD 718 return true 719 case OpGetCallerPC: 720 v.Op = OpARM64LoweredGetCallerPC 721 return true 722 case OpGetCallerSP: 723 v.Op = OpARM64LoweredGetCallerSP 724 return true 725 case OpGetClosurePtr: 726 v.Op = OpARM64LoweredGetClosurePtr 727 return true 728 case OpHmul32: 729 return rewriteValueARM64_OpHmul32(v) 730 case OpHmul32u: 731 return rewriteValueARM64_OpHmul32u(v) 732 case OpHmul64: 733 v.Op = OpARM64MULH 734 return true 735 case OpHmul64u: 736 v.Op = OpARM64UMULH 737 return true 738 case OpInterCall: 739 v.Op = OpARM64CALLinter 740 return true 741 case OpIsInBounds: 742 return rewriteValueARM64_OpIsInBounds(v) 743 case OpIsNonNil: 744 return rewriteValueARM64_OpIsNonNil(v) 745 case OpIsSliceInBounds: 746 return rewriteValueARM64_OpIsSliceInBounds(v) 747 case OpLeq16: 748 return rewriteValueARM64_OpLeq16(v) 749 case OpLeq16U: 750 return rewriteValueARM64_OpLeq16U(v) 751 case OpLeq32: 752 return 
rewriteValueARM64_OpLeq32(v) 753 case OpLeq32F: 754 return rewriteValueARM64_OpLeq32F(v) 755 case OpLeq32U: 756 return rewriteValueARM64_OpLeq32U(v) 757 case OpLeq64: 758 return rewriteValueARM64_OpLeq64(v) 759 case OpLeq64F: 760 return rewriteValueARM64_OpLeq64F(v) 761 case OpLeq64U: 762 return rewriteValueARM64_OpLeq64U(v) 763 case OpLeq8: 764 return rewriteValueARM64_OpLeq8(v) 765 case OpLeq8U: 766 return rewriteValueARM64_OpLeq8U(v) 767 case OpLess16: 768 return rewriteValueARM64_OpLess16(v) 769 case OpLess16U: 770 return rewriteValueARM64_OpLess16U(v) 771 case OpLess32: 772 return rewriteValueARM64_OpLess32(v) 773 case OpLess32F: 774 return rewriteValueARM64_OpLess32F(v) 775 case OpLess32U: 776 return rewriteValueARM64_OpLess32U(v) 777 case OpLess64: 778 return rewriteValueARM64_OpLess64(v) 779 case OpLess64F: 780 return rewriteValueARM64_OpLess64F(v) 781 case OpLess64U: 782 return rewriteValueARM64_OpLess64U(v) 783 case OpLess8: 784 return rewriteValueARM64_OpLess8(v) 785 case OpLess8U: 786 return rewriteValueARM64_OpLess8U(v) 787 case OpLoad: 788 return rewriteValueARM64_OpLoad(v) 789 case OpLocalAddr: 790 return rewriteValueARM64_OpLocalAddr(v) 791 case OpLsh16x16: 792 return rewriteValueARM64_OpLsh16x16(v) 793 case OpLsh16x32: 794 return rewriteValueARM64_OpLsh16x32(v) 795 case OpLsh16x64: 796 return rewriteValueARM64_OpLsh16x64(v) 797 case OpLsh16x8: 798 return rewriteValueARM64_OpLsh16x8(v) 799 case OpLsh32x16: 800 return rewriteValueARM64_OpLsh32x16(v) 801 case OpLsh32x32: 802 return rewriteValueARM64_OpLsh32x32(v) 803 case OpLsh32x64: 804 return rewriteValueARM64_OpLsh32x64(v) 805 case OpLsh32x8: 806 return rewriteValueARM64_OpLsh32x8(v) 807 case OpLsh64x16: 808 return rewriteValueARM64_OpLsh64x16(v) 809 case OpLsh64x32: 810 return rewriteValueARM64_OpLsh64x32(v) 811 case OpLsh64x64: 812 return rewriteValueARM64_OpLsh64x64(v) 813 case OpLsh64x8: 814 return rewriteValueARM64_OpLsh64x8(v) 815 case OpLsh8x16: 816 return rewriteValueARM64_OpLsh8x16(v) 817 
case OpLsh8x32: 818 return rewriteValueARM64_OpLsh8x32(v) 819 case OpLsh8x64: 820 return rewriteValueARM64_OpLsh8x64(v) 821 case OpLsh8x8: 822 return rewriteValueARM64_OpLsh8x8(v) 823 case OpMax32F: 824 v.Op = OpARM64FMAXS 825 return true 826 case OpMax64F: 827 v.Op = OpARM64FMAXD 828 return true 829 case OpMin32F: 830 v.Op = OpARM64FMINS 831 return true 832 case OpMin64F: 833 v.Op = OpARM64FMIND 834 return true 835 case OpMod16: 836 return rewriteValueARM64_OpMod16(v) 837 case OpMod16u: 838 return rewriteValueARM64_OpMod16u(v) 839 case OpMod32: 840 return rewriteValueARM64_OpMod32(v) 841 case OpMod32u: 842 v.Op = OpARM64UMODW 843 return true 844 case OpMod64: 845 return rewriteValueARM64_OpMod64(v) 846 case OpMod64u: 847 v.Op = OpARM64UMOD 848 return true 849 case OpMod8: 850 return rewriteValueARM64_OpMod8(v) 851 case OpMod8u: 852 return rewriteValueARM64_OpMod8u(v) 853 case OpMove: 854 return rewriteValueARM64_OpMove(v) 855 case OpMul16: 856 v.Op = OpARM64MULW 857 return true 858 case OpMul32: 859 v.Op = OpARM64MULW 860 return true 861 case OpMul32F: 862 v.Op = OpARM64FMULS 863 return true 864 case OpMul64: 865 v.Op = OpARM64MUL 866 return true 867 case OpMul64F: 868 v.Op = OpARM64FMULD 869 return true 870 case OpMul8: 871 v.Op = OpARM64MULW 872 return true 873 case OpNeg16: 874 v.Op = OpARM64NEG 875 return true 876 case OpNeg32: 877 v.Op = OpARM64NEG 878 return true 879 case OpNeg32F: 880 v.Op = OpARM64FNEGS 881 return true 882 case OpNeg64: 883 v.Op = OpARM64NEG 884 return true 885 case OpNeg64F: 886 v.Op = OpARM64FNEGD 887 return true 888 case OpNeg8: 889 v.Op = OpARM64NEG 890 return true 891 case OpNeq16: 892 return rewriteValueARM64_OpNeq16(v) 893 case OpNeq32: 894 return rewriteValueARM64_OpNeq32(v) 895 case OpNeq32F: 896 return rewriteValueARM64_OpNeq32F(v) 897 case OpNeq64: 898 return rewriteValueARM64_OpNeq64(v) 899 case OpNeq64F: 900 return rewriteValueARM64_OpNeq64F(v) 901 case OpNeq8: 902 return rewriteValueARM64_OpNeq8(v) 903 case OpNeqB: 904 v.Op = 
OpARM64XOR 905 return true 906 case OpNeqPtr: 907 return rewriteValueARM64_OpNeqPtr(v) 908 case OpNilCheck: 909 v.Op = OpARM64LoweredNilCheck 910 return true 911 case OpNot: 912 return rewriteValueARM64_OpNot(v) 913 case OpOffPtr: 914 return rewriteValueARM64_OpOffPtr(v) 915 case OpOr16: 916 v.Op = OpARM64OR 917 return true 918 case OpOr32: 919 v.Op = OpARM64OR 920 return true 921 case OpOr64: 922 v.Op = OpARM64OR 923 return true 924 case OpOr8: 925 v.Op = OpARM64OR 926 return true 927 case OpOrB: 928 v.Op = OpARM64OR 929 return true 930 case OpPanicBounds: 931 return rewriteValueARM64_OpPanicBounds(v) 932 case OpPopCount16: 933 return rewriteValueARM64_OpPopCount16(v) 934 case OpPopCount32: 935 return rewriteValueARM64_OpPopCount32(v) 936 case OpPopCount64: 937 return rewriteValueARM64_OpPopCount64(v) 938 case OpPrefetchCache: 939 return rewriteValueARM64_OpPrefetchCache(v) 940 case OpPrefetchCacheStreamed: 941 return rewriteValueARM64_OpPrefetchCacheStreamed(v) 942 case OpPubBarrier: 943 return rewriteValueARM64_OpPubBarrier(v) 944 case OpRotateLeft16: 945 return rewriteValueARM64_OpRotateLeft16(v) 946 case OpRotateLeft32: 947 return rewriteValueARM64_OpRotateLeft32(v) 948 case OpRotateLeft64: 949 return rewriteValueARM64_OpRotateLeft64(v) 950 case OpRotateLeft8: 951 return rewriteValueARM64_OpRotateLeft8(v) 952 case OpRound: 953 v.Op = OpARM64FRINTAD 954 return true 955 case OpRound32F: 956 v.Op = OpARM64LoweredRound32F 957 return true 958 case OpRound64F: 959 v.Op = OpARM64LoweredRound64F 960 return true 961 case OpRoundToEven: 962 v.Op = OpARM64FRINTND 963 return true 964 case OpRsh16Ux16: 965 return rewriteValueARM64_OpRsh16Ux16(v) 966 case OpRsh16Ux32: 967 return rewriteValueARM64_OpRsh16Ux32(v) 968 case OpRsh16Ux64: 969 return rewriteValueARM64_OpRsh16Ux64(v) 970 case OpRsh16Ux8: 971 return rewriteValueARM64_OpRsh16Ux8(v) 972 case OpRsh16x16: 973 return rewriteValueARM64_OpRsh16x16(v) 974 case OpRsh16x32: 975 return rewriteValueARM64_OpRsh16x32(v) 976 case 
OpRsh16x64: 977 return rewriteValueARM64_OpRsh16x64(v) 978 case OpRsh16x8: 979 return rewriteValueARM64_OpRsh16x8(v) 980 case OpRsh32Ux16: 981 return rewriteValueARM64_OpRsh32Ux16(v) 982 case OpRsh32Ux32: 983 return rewriteValueARM64_OpRsh32Ux32(v) 984 case OpRsh32Ux64: 985 return rewriteValueARM64_OpRsh32Ux64(v) 986 case OpRsh32Ux8: 987 return rewriteValueARM64_OpRsh32Ux8(v) 988 case OpRsh32x16: 989 return rewriteValueARM64_OpRsh32x16(v) 990 case OpRsh32x32: 991 return rewriteValueARM64_OpRsh32x32(v) 992 case OpRsh32x64: 993 return rewriteValueARM64_OpRsh32x64(v) 994 case OpRsh32x8: 995 return rewriteValueARM64_OpRsh32x8(v) 996 case OpRsh64Ux16: 997 return rewriteValueARM64_OpRsh64Ux16(v) 998 case OpRsh64Ux32: 999 return rewriteValueARM64_OpRsh64Ux32(v) 1000 case OpRsh64Ux64: 1001 return rewriteValueARM64_OpRsh64Ux64(v) 1002 case OpRsh64Ux8: 1003 return rewriteValueARM64_OpRsh64Ux8(v) 1004 case OpRsh64x16: 1005 return rewriteValueARM64_OpRsh64x16(v) 1006 case OpRsh64x32: 1007 return rewriteValueARM64_OpRsh64x32(v) 1008 case OpRsh64x64: 1009 return rewriteValueARM64_OpRsh64x64(v) 1010 case OpRsh64x8: 1011 return rewriteValueARM64_OpRsh64x8(v) 1012 case OpRsh8Ux16: 1013 return rewriteValueARM64_OpRsh8Ux16(v) 1014 case OpRsh8Ux32: 1015 return rewriteValueARM64_OpRsh8Ux32(v) 1016 case OpRsh8Ux64: 1017 return rewriteValueARM64_OpRsh8Ux64(v) 1018 case OpRsh8Ux8: 1019 return rewriteValueARM64_OpRsh8Ux8(v) 1020 case OpRsh8x16: 1021 return rewriteValueARM64_OpRsh8x16(v) 1022 case OpRsh8x32: 1023 return rewriteValueARM64_OpRsh8x32(v) 1024 case OpRsh8x64: 1025 return rewriteValueARM64_OpRsh8x64(v) 1026 case OpRsh8x8: 1027 return rewriteValueARM64_OpRsh8x8(v) 1028 case OpSelect0: 1029 return rewriteValueARM64_OpSelect0(v) 1030 case OpSelect1: 1031 return rewriteValueARM64_OpSelect1(v) 1032 case OpSelectN: 1033 return rewriteValueARM64_OpSelectN(v) 1034 case OpSignExt16to32: 1035 v.Op = OpARM64MOVHreg 1036 return true 1037 case OpSignExt16to64: 1038 v.Op = OpARM64MOVHreg 1039 
return true 1040 case OpSignExt32to64: 1041 v.Op = OpARM64MOVWreg 1042 return true 1043 case OpSignExt8to16: 1044 v.Op = OpARM64MOVBreg 1045 return true 1046 case OpSignExt8to32: 1047 v.Op = OpARM64MOVBreg 1048 return true 1049 case OpSignExt8to64: 1050 v.Op = OpARM64MOVBreg 1051 return true 1052 case OpSlicemask: 1053 return rewriteValueARM64_OpSlicemask(v) 1054 case OpSqrt: 1055 v.Op = OpARM64FSQRTD 1056 return true 1057 case OpSqrt32: 1058 v.Op = OpARM64FSQRTS 1059 return true 1060 case OpStaticCall: 1061 v.Op = OpARM64CALLstatic 1062 return true 1063 case OpStore: 1064 return rewriteValueARM64_OpStore(v) 1065 case OpSub16: 1066 v.Op = OpARM64SUB 1067 return true 1068 case OpSub32: 1069 v.Op = OpARM64SUB 1070 return true 1071 case OpSub32F: 1072 v.Op = OpARM64FSUBS 1073 return true 1074 case OpSub64: 1075 v.Op = OpARM64SUB 1076 return true 1077 case OpSub64F: 1078 v.Op = OpARM64FSUBD 1079 return true 1080 case OpSub8: 1081 v.Op = OpARM64SUB 1082 return true 1083 case OpSubPtr: 1084 v.Op = OpARM64SUB 1085 return true 1086 case OpTailCall: 1087 v.Op = OpARM64CALLtail 1088 return true 1089 case OpTrunc: 1090 v.Op = OpARM64FRINTZD 1091 return true 1092 case OpTrunc16to8: 1093 v.Op = OpCopy 1094 return true 1095 case OpTrunc32to16: 1096 v.Op = OpCopy 1097 return true 1098 case OpTrunc32to8: 1099 v.Op = OpCopy 1100 return true 1101 case OpTrunc64to16: 1102 v.Op = OpCopy 1103 return true 1104 case OpTrunc64to32: 1105 v.Op = OpCopy 1106 return true 1107 case OpTrunc64to8: 1108 v.Op = OpCopy 1109 return true 1110 case OpWB: 1111 v.Op = OpARM64LoweredWB 1112 return true 1113 case OpXor16: 1114 v.Op = OpARM64XOR 1115 return true 1116 case OpXor32: 1117 v.Op = OpARM64XOR 1118 return true 1119 case OpXor64: 1120 v.Op = OpARM64XOR 1121 return true 1122 case OpXor8: 1123 v.Op = OpARM64XOR 1124 return true 1125 case OpZero: 1126 return rewriteValueARM64_OpZero(v) 1127 case OpZeroExt16to32: 1128 v.Op = OpARM64MOVHUreg 1129 return true 1130 case OpZeroExt16to64: 1131 v.Op = 
OpARM64MOVHUreg 1132 return true 1133 case OpZeroExt32to64: 1134 v.Op = OpARM64MOVWUreg 1135 return true 1136 case OpZeroExt8to16: 1137 v.Op = OpARM64MOVBUreg 1138 return true 1139 case OpZeroExt8to32: 1140 v.Op = OpARM64MOVBUreg 1141 return true 1142 case OpZeroExt8to64: 1143 v.Op = OpARM64MOVBUreg 1144 return true 1145 } 1146 return false 1147 } 1148 func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool { 1149 v_2 := v.Args[2] 1150 v_1 := v.Args[1] 1151 v_0 := v.Args[0] 1152 b := v.Block 1153 typ := &b.Func.Config.Types 1154 // match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c)))) 1155 // result: (ADCSflags x y c) 1156 for { 1157 x := v_0 1158 y := v_1 1159 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags { 1160 break 1161 } 1162 v_2_0 := v_2.Args[0] 1163 if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 { 1164 break 1165 } 1166 v_2_0_0 := v_2_0.Args[0] 1167 if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 { 1168 break 1169 } 1170 c := v_2_0_0.Args[0] 1171 v.reset(OpARM64ADCSflags) 1172 v.AddArg3(x, y, c) 1173 return true 1174 } 1175 // match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0])))) 1176 // result: (ADDSflags x y) 1177 for { 1178 x := v_0 1179 y := v_1 1180 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags { 1181 break 1182 } 1183 v_2_0 := v_2.Args[0] 1184 if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 { 1185 break 1186 } 1187 v_2_0_0 := v_2_0.Args[0] 1188 if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 { 1189 break 1190 } 1191 v.reset(OpARM64ADDSflags) 1192 v.AddArg2(x, y) 1193 return true 1194 } 1195 return false 1196 } 1197 func rewriteValueARM64_OpARM64ADD(v *Value) bool { 1198 v_1 := v.Args[1] 1199 v_0 := v.Args[0] 1200 // match: (ADD x (MOVDconst <t> [c])) 1201 // cond: !t.IsPtr() 1202 // result: (ADDconst [c] x) 1203 for { 1204 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 
= _i0+1, v_1, v_0 { 1205 x := v_0 1206 if v_1.Op != OpARM64MOVDconst { 1207 continue 1208 } 1209 t := v_1.Type 1210 c := auxIntToInt64(v_1.AuxInt) 1211 if !(!t.IsPtr()) { 1212 continue 1213 } 1214 v.reset(OpARM64ADDconst) 1215 v.AuxInt = int64ToAuxInt(c) 1216 v.AddArg(x) 1217 return true 1218 } 1219 break 1220 } 1221 // match: (ADD a l:(MUL x y)) 1222 // cond: l.Uses==1 && clobber(l) 1223 // result: (MADD a x y) 1224 for { 1225 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1226 a := v_0 1227 l := v_1 1228 if l.Op != OpARM64MUL { 1229 continue 1230 } 1231 y := l.Args[1] 1232 x := l.Args[0] 1233 if !(l.Uses == 1 && clobber(l)) { 1234 continue 1235 } 1236 v.reset(OpARM64MADD) 1237 v.AddArg3(a, x, y) 1238 return true 1239 } 1240 break 1241 } 1242 // match: (ADD a l:(MNEG x y)) 1243 // cond: l.Uses==1 && clobber(l) 1244 // result: (MSUB a x y) 1245 for { 1246 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1247 a := v_0 1248 l := v_1 1249 if l.Op != OpARM64MNEG { 1250 continue 1251 } 1252 y := l.Args[1] 1253 x := l.Args[0] 1254 if !(l.Uses == 1 && clobber(l)) { 1255 continue 1256 } 1257 v.reset(OpARM64MSUB) 1258 v.AddArg3(a, x, y) 1259 return true 1260 } 1261 break 1262 } 1263 // match: (ADD a l:(MULW x y)) 1264 // cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l) 1265 // result: (MADDW a x y) 1266 for { 1267 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1268 a := v_0 1269 l := v_1 1270 if l.Op != OpARM64MULW { 1271 continue 1272 } 1273 y := l.Args[1] 1274 x := l.Args[0] 1275 if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) { 1276 continue 1277 } 1278 v.reset(OpARM64MADDW) 1279 v.AddArg3(a, x, y) 1280 return true 1281 } 1282 break 1283 } 1284 // match: (ADD a l:(MNEGW x y)) 1285 // cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l) 1286 // result: (MSUBW a x y) 1287 for { 1288 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1289 a := v_0 1290 l := v_1 1291 if l.Op != OpARM64MNEGW { 1292 continue 1293 } 1294 y := 
l.Args[1] 1295 x := l.Args[0] 1296 if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) { 1297 continue 1298 } 1299 v.reset(OpARM64MSUBW) 1300 v.AddArg3(a, x, y) 1301 return true 1302 } 1303 break 1304 } 1305 // match: (ADD x (NEG y)) 1306 // result: (SUB x y) 1307 for { 1308 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1309 x := v_0 1310 if v_1.Op != OpARM64NEG { 1311 continue 1312 } 1313 y := v_1.Args[0] 1314 v.reset(OpARM64SUB) 1315 v.AddArg2(x, y) 1316 return true 1317 } 1318 break 1319 } 1320 // match: (ADD x0 x1:(SLLconst [c] y)) 1321 // cond: clobberIfDead(x1) 1322 // result: (ADDshiftLL x0 y [c]) 1323 for { 1324 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1325 x0 := v_0 1326 x1 := v_1 1327 if x1.Op != OpARM64SLLconst { 1328 continue 1329 } 1330 c := auxIntToInt64(x1.AuxInt) 1331 y := x1.Args[0] 1332 if !(clobberIfDead(x1)) { 1333 continue 1334 } 1335 v.reset(OpARM64ADDshiftLL) 1336 v.AuxInt = int64ToAuxInt(c) 1337 v.AddArg2(x0, y) 1338 return true 1339 } 1340 break 1341 } 1342 // match: (ADD x0 x1:(SRLconst [c] y)) 1343 // cond: clobberIfDead(x1) 1344 // result: (ADDshiftRL x0 y [c]) 1345 for { 1346 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1347 x0 := v_0 1348 x1 := v_1 1349 if x1.Op != OpARM64SRLconst { 1350 continue 1351 } 1352 c := auxIntToInt64(x1.AuxInt) 1353 y := x1.Args[0] 1354 if !(clobberIfDead(x1)) { 1355 continue 1356 } 1357 v.reset(OpARM64ADDshiftRL) 1358 v.AuxInt = int64ToAuxInt(c) 1359 v.AddArg2(x0, y) 1360 return true 1361 } 1362 break 1363 } 1364 // match: (ADD x0 x1:(SRAconst [c] y)) 1365 // cond: clobberIfDead(x1) 1366 // result: (ADDshiftRA x0 y [c]) 1367 for { 1368 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1369 x0 := v_0 1370 x1 := v_1 1371 if x1.Op != OpARM64SRAconst { 1372 continue 1373 } 1374 c := auxIntToInt64(x1.AuxInt) 1375 y := x1.Args[0] 1376 if !(clobberIfDead(x1)) { 1377 continue 1378 } 1379 v.reset(OpARM64ADDshiftRA) 1380 v.AuxInt = int64ToAuxInt(c) 1381 v.AddArg2(x0, 
y) 1382 return true 1383 } 1384 break 1385 } 1386 return false 1387 } 1388 func rewriteValueARM64_OpARM64ADDSflags(v *Value) bool { 1389 v_1 := v.Args[1] 1390 v_0 := v.Args[0] 1391 // match: (ADDSflags x (MOVDconst [c])) 1392 // result: (ADDSconstflags [c] x) 1393 for { 1394 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1395 x := v_0 1396 if v_1.Op != OpARM64MOVDconst { 1397 continue 1398 } 1399 c := auxIntToInt64(v_1.AuxInt) 1400 v.reset(OpARM64ADDSconstflags) 1401 v.AuxInt = int64ToAuxInt(c) 1402 v.AddArg(x) 1403 return true 1404 } 1405 break 1406 } 1407 return false 1408 } 1409 func rewriteValueARM64_OpARM64ADDconst(v *Value) bool { 1410 v_0 := v.Args[0] 1411 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 1412 // cond: is32Bit(off1+int64(off2)) 1413 // result: (MOVDaddr [int32(off1)+off2] {sym} ptr) 1414 for { 1415 off1 := auxIntToInt64(v.AuxInt) 1416 if v_0.Op != OpARM64MOVDaddr { 1417 break 1418 } 1419 off2 := auxIntToInt32(v_0.AuxInt) 1420 sym := auxToSym(v_0.Aux) 1421 ptr := v_0.Args[0] 1422 if !(is32Bit(off1 + int64(off2))) { 1423 break 1424 } 1425 v.reset(OpARM64MOVDaddr) 1426 v.AuxInt = int32ToAuxInt(int32(off1) + off2) 1427 v.Aux = symToAux(sym) 1428 v.AddArg(ptr) 1429 return true 1430 } 1431 // match: (ADDconst [c] y) 1432 // cond: c < 0 1433 // result: (SUBconst [-c] y) 1434 for { 1435 c := auxIntToInt64(v.AuxInt) 1436 y := v_0 1437 if !(c < 0) { 1438 break 1439 } 1440 v.reset(OpARM64SUBconst) 1441 v.AuxInt = int64ToAuxInt(-c) 1442 v.AddArg(y) 1443 return true 1444 } 1445 // match: (ADDconst [0] x) 1446 // result: x 1447 for { 1448 if auxIntToInt64(v.AuxInt) != 0 { 1449 break 1450 } 1451 x := v_0 1452 v.copyOf(x) 1453 return true 1454 } 1455 // match: (ADDconst [c] (MOVDconst [d])) 1456 // result: (MOVDconst [c+d]) 1457 for { 1458 c := auxIntToInt64(v.AuxInt) 1459 if v_0.Op != OpARM64MOVDconst { 1460 break 1461 } 1462 d := auxIntToInt64(v_0.AuxInt) 1463 v.reset(OpARM64MOVDconst) 1464 v.AuxInt = int64ToAuxInt(c + d) 1465 return true 
1466 } 1467 // match: (ADDconst [c] (ADDconst [d] x)) 1468 // result: (ADDconst [c+d] x) 1469 for { 1470 c := auxIntToInt64(v.AuxInt) 1471 if v_0.Op != OpARM64ADDconst { 1472 break 1473 } 1474 d := auxIntToInt64(v_0.AuxInt) 1475 x := v_0.Args[0] 1476 v.reset(OpARM64ADDconst) 1477 v.AuxInt = int64ToAuxInt(c + d) 1478 v.AddArg(x) 1479 return true 1480 } 1481 // match: (ADDconst [c] (SUBconst [d] x)) 1482 // result: (ADDconst [c-d] x) 1483 for { 1484 c := auxIntToInt64(v.AuxInt) 1485 if v_0.Op != OpARM64SUBconst { 1486 break 1487 } 1488 d := auxIntToInt64(v_0.AuxInt) 1489 x := v_0.Args[0] 1490 v.reset(OpARM64ADDconst) 1491 v.AuxInt = int64ToAuxInt(c - d) 1492 v.AddArg(x) 1493 return true 1494 } 1495 return false 1496 } 1497 func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool { 1498 v_1 := v.Args[1] 1499 v_0 := v.Args[0] 1500 b := v.Block 1501 typ := &b.Func.Config.Types 1502 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 1503 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 1504 for { 1505 d := auxIntToInt64(v.AuxInt) 1506 if v_0.Op != OpARM64MOVDconst { 1507 break 1508 } 1509 c := auxIntToInt64(v_0.AuxInt) 1510 x := v_1 1511 v.reset(OpARM64ADDconst) 1512 v.AuxInt = int64ToAuxInt(c) 1513 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1514 v0.AuxInt = int64ToAuxInt(d) 1515 v0.AddArg(x) 1516 v.AddArg(v0) 1517 return true 1518 } 1519 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 1520 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 1521 for { 1522 d := auxIntToInt64(v.AuxInt) 1523 x := v_0 1524 if v_1.Op != OpARM64MOVDconst { 1525 break 1526 } 1527 c := auxIntToInt64(v_1.AuxInt) 1528 v.reset(OpARM64ADDconst) 1529 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 1530 v.AddArg(x) 1531 return true 1532 } 1533 // match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x) 1534 // result: (REV16W x) 1535 for { 1536 if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || 
auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) { 1537 break 1538 } 1539 x := v_0.Args[0] 1540 if x != v_1 { 1541 break 1542 } 1543 v.reset(OpARM64REV16W) 1544 v.AddArg(x) 1545 return true 1546 } 1547 // match: (ADDshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x)) 1548 // cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff 1549 // result: (REV16W x) 1550 for { 1551 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) { 1552 break 1553 } 1554 v_0_0 := v_0.Args[0] 1555 if v_0_0.Op != OpARM64ANDconst { 1556 break 1557 } 1558 c1 := auxIntToInt64(v_0_0.AuxInt) 1559 x := v_0_0.Args[0] 1560 if v_1.Op != OpARM64ANDconst { 1561 break 1562 } 1563 c2 := auxIntToInt64(v_1.AuxInt) 1564 if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) { 1565 break 1566 } 1567 v.reset(OpARM64REV16W) 1568 v.AddArg(x) 1569 return true 1570 } 1571 // match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x)) 1572 // cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) 1573 // result: (REV16 x) 1574 for { 1575 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 { 1576 break 1577 } 1578 v_0_0 := v_0.Args[0] 1579 if v_0_0.Op != OpARM64ANDconst { 1580 break 1581 } 1582 c1 := auxIntToInt64(v_0_0.AuxInt) 1583 x := v_0_0.Args[0] 1584 if v_1.Op != OpARM64ANDconst { 1585 break 1586 } 1587 c2 := auxIntToInt64(v_1.AuxInt) 1588 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) { 1589 break 1590 } 1591 v.reset(OpARM64REV16) 1592 v.AddArg(x) 1593 return true 1594 } 1595 // match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x)) 1596 // cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) 1597 // result: (REV16 (ANDconst <x.Type> [0xffffffff] x)) 1598 for { 1599 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || 
auxIntToInt64(v_0.AuxInt) != 8 { 1600 break 1601 } 1602 v_0_0 := v_0.Args[0] 1603 if v_0_0.Op != OpARM64ANDconst { 1604 break 1605 } 1606 c1 := auxIntToInt64(v_0_0.AuxInt) 1607 x := v_0_0.Args[0] 1608 if v_1.Op != OpARM64ANDconst { 1609 break 1610 } 1611 c2 := auxIntToInt64(v_1.AuxInt) 1612 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) { 1613 break 1614 } 1615 v.reset(OpARM64REV16) 1616 v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type) 1617 v0.AuxInt = int64ToAuxInt(0xffffffff) 1618 v0.AddArg(x) 1619 v.AddArg(v0) 1620 return true 1621 } 1622 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x2) 1623 // result: (EXTRconst [64-c] x2 x) 1624 for { 1625 c := auxIntToInt64(v.AuxInt) 1626 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c { 1627 break 1628 } 1629 x := v_0.Args[0] 1630 x2 := v_1 1631 v.reset(OpARM64EXTRconst) 1632 v.AuxInt = int64ToAuxInt(64 - c) 1633 v.AddArg2(x2, x) 1634 return true 1635 } 1636 // match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2) 1637 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 1638 // result: (EXTRWconst [32-c] x2 x) 1639 for { 1640 t := v.Type 1641 c := auxIntToInt64(v.AuxInt) 1642 if v_0.Op != OpARM64UBFX { 1643 break 1644 } 1645 bfc := auxIntToArm64BitField(v_0.AuxInt) 1646 x := v_0.Args[0] 1647 x2 := v_1 1648 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 1649 break 1650 } 1651 v.reset(OpARM64EXTRWconst) 1652 v.AuxInt = int64ToAuxInt(32 - c) 1653 v.AddArg2(x2, x) 1654 return true 1655 } 1656 return false 1657 } 1658 func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool { 1659 v_1 := v.Args[1] 1660 v_0 := v.Args[0] 1661 b := v.Block 1662 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1663 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1664 for { 1665 d := auxIntToInt64(v.AuxInt) 1666 if v_0.Op != OpARM64MOVDconst { 1667 break 1668 } 1669 c := auxIntToInt64(v_0.AuxInt) 1670 x := v_1 1671 v.reset(OpARM64ADDconst) 1672 v.AuxInt = int64ToAuxInt(c) 1673 v0 
:= b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1674 v0.AuxInt = int64ToAuxInt(d) 1675 v0.AddArg(x) 1676 v.AddArg(v0) 1677 return true 1678 } 1679 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1680 // result: (ADDconst x [c>>uint64(d)]) 1681 for { 1682 d := auxIntToInt64(v.AuxInt) 1683 x := v_0 1684 if v_1.Op != OpARM64MOVDconst { 1685 break 1686 } 1687 c := auxIntToInt64(v_1.AuxInt) 1688 v.reset(OpARM64ADDconst) 1689 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 1690 v.AddArg(x) 1691 return true 1692 } 1693 return false 1694 } 1695 func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool { 1696 v_1 := v.Args[1] 1697 v_0 := v.Args[0] 1698 b := v.Block 1699 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1700 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1701 for { 1702 d := auxIntToInt64(v.AuxInt) 1703 if v_0.Op != OpARM64MOVDconst { 1704 break 1705 } 1706 c := auxIntToInt64(v_0.AuxInt) 1707 x := v_1 1708 v.reset(OpARM64ADDconst) 1709 v.AuxInt = int64ToAuxInt(c) 1710 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1711 v0.AuxInt = int64ToAuxInt(d) 1712 v0.AddArg(x) 1713 v.AddArg(v0) 1714 return true 1715 } 1716 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1717 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1718 for { 1719 d := auxIntToInt64(v.AuxInt) 1720 x := v_0 1721 if v_1.Op != OpARM64MOVDconst { 1722 break 1723 } 1724 c := auxIntToInt64(v_1.AuxInt) 1725 v.reset(OpARM64ADDconst) 1726 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 1727 v.AddArg(x) 1728 return true 1729 } 1730 return false 1731 } 1732 func rewriteValueARM64_OpARM64AND(v *Value) bool { 1733 v_1 := v.Args[1] 1734 v_0 := v.Args[0] 1735 // match: (AND x (MOVDconst [c])) 1736 // result: (ANDconst [c] x) 1737 for { 1738 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1739 x := v_0 1740 if v_1.Op != OpARM64MOVDconst { 1741 continue 1742 } 1743 c := auxIntToInt64(v_1.AuxInt) 1744 v.reset(OpARM64ANDconst) 1745 v.AuxInt = int64ToAuxInt(c) 1746 v.AddArg(x) 1747 return true 1748 } 1749 
break 1750 } 1751 // match: (AND x x) 1752 // result: x 1753 for { 1754 x := v_0 1755 if x != v_1 { 1756 break 1757 } 1758 v.copyOf(x) 1759 return true 1760 } 1761 // match: (AND x (MVN y)) 1762 // result: (BIC x y) 1763 for { 1764 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1765 x := v_0 1766 if v_1.Op != OpARM64MVN { 1767 continue 1768 } 1769 y := v_1.Args[0] 1770 v.reset(OpARM64BIC) 1771 v.AddArg2(x, y) 1772 return true 1773 } 1774 break 1775 } 1776 // match: (AND x0 x1:(SLLconst [c] y)) 1777 // cond: clobberIfDead(x1) 1778 // result: (ANDshiftLL x0 y [c]) 1779 for { 1780 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1781 x0 := v_0 1782 x1 := v_1 1783 if x1.Op != OpARM64SLLconst { 1784 continue 1785 } 1786 c := auxIntToInt64(x1.AuxInt) 1787 y := x1.Args[0] 1788 if !(clobberIfDead(x1)) { 1789 continue 1790 } 1791 v.reset(OpARM64ANDshiftLL) 1792 v.AuxInt = int64ToAuxInt(c) 1793 v.AddArg2(x0, y) 1794 return true 1795 } 1796 break 1797 } 1798 // match: (AND x0 x1:(SRLconst [c] y)) 1799 // cond: clobberIfDead(x1) 1800 // result: (ANDshiftRL x0 y [c]) 1801 for { 1802 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1803 x0 := v_0 1804 x1 := v_1 1805 if x1.Op != OpARM64SRLconst { 1806 continue 1807 } 1808 c := auxIntToInt64(x1.AuxInt) 1809 y := x1.Args[0] 1810 if !(clobberIfDead(x1)) { 1811 continue 1812 } 1813 v.reset(OpARM64ANDshiftRL) 1814 v.AuxInt = int64ToAuxInt(c) 1815 v.AddArg2(x0, y) 1816 return true 1817 } 1818 break 1819 } 1820 // match: (AND x0 x1:(SRAconst [c] y)) 1821 // cond: clobberIfDead(x1) 1822 // result: (ANDshiftRA x0 y [c]) 1823 for { 1824 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1825 x0 := v_0 1826 x1 := v_1 1827 if x1.Op != OpARM64SRAconst { 1828 continue 1829 } 1830 c := auxIntToInt64(x1.AuxInt) 1831 y := x1.Args[0] 1832 if !(clobberIfDead(x1)) { 1833 continue 1834 } 1835 v.reset(OpARM64ANDshiftRA) 1836 v.AuxInt = int64ToAuxInt(c) 1837 v.AddArg2(x0, y) 1838 return true 1839 } 1840 break 1841 } 
1842 // match: (AND x0 x1:(RORconst [c] y)) 1843 // cond: clobberIfDead(x1) 1844 // result: (ANDshiftRO x0 y [c]) 1845 for { 1846 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1847 x0 := v_0 1848 x1 := v_1 1849 if x1.Op != OpARM64RORconst { 1850 continue 1851 } 1852 c := auxIntToInt64(x1.AuxInt) 1853 y := x1.Args[0] 1854 if !(clobberIfDead(x1)) { 1855 continue 1856 } 1857 v.reset(OpARM64ANDshiftRO) 1858 v.AuxInt = int64ToAuxInt(c) 1859 v.AddArg2(x0, y) 1860 return true 1861 } 1862 break 1863 } 1864 return false 1865 } 1866 func rewriteValueARM64_OpARM64ANDconst(v *Value) bool { 1867 v_0 := v.Args[0] 1868 // match: (ANDconst [0] _) 1869 // result: (MOVDconst [0]) 1870 for { 1871 if auxIntToInt64(v.AuxInt) != 0 { 1872 break 1873 } 1874 v.reset(OpARM64MOVDconst) 1875 v.AuxInt = int64ToAuxInt(0) 1876 return true 1877 } 1878 // match: (ANDconst [-1] x) 1879 // result: x 1880 for { 1881 if auxIntToInt64(v.AuxInt) != -1 { 1882 break 1883 } 1884 x := v_0 1885 v.copyOf(x) 1886 return true 1887 } 1888 // match: (ANDconst [c] (MOVDconst [d])) 1889 // result: (MOVDconst [c&d]) 1890 for { 1891 c := auxIntToInt64(v.AuxInt) 1892 if v_0.Op != OpARM64MOVDconst { 1893 break 1894 } 1895 d := auxIntToInt64(v_0.AuxInt) 1896 v.reset(OpARM64MOVDconst) 1897 v.AuxInt = int64ToAuxInt(c & d) 1898 return true 1899 } 1900 // match: (ANDconst [c] (ANDconst [d] x)) 1901 // result: (ANDconst [c&d] x) 1902 for { 1903 c := auxIntToInt64(v.AuxInt) 1904 if v_0.Op != OpARM64ANDconst { 1905 break 1906 } 1907 d := auxIntToInt64(v_0.AuxInt) 1908 x := v_0.Args[0] 1909 v.reset(OpARM64ANDconst) 1910 v.AuxInt = int64ToAuxInt(c & d) 1911 v.AddArg(x) 1912 return true 1913 } 1914 // match: (ANDconst [c] (MOVWUreg x)) 1915 // result: (ANDconst [c&(1<<32-1)] x) 1916 for { 1917 c := auxIntToInt64(v.AuxInt) 1918 if v_0.Op != OpARM64MOVWUreg { 1919 break 1920 } 1921 x := v_0.Args[0] 1922 v.reset(OpARM64ANDconst) 1923 v.AuxInt = int64ToAuxInt(c & (1<<32 - 1)) 1924 v.AddArg(x) 1925 return true 1926 } 1927 
// match: (ANDconst [c] (MOVHUreg x)) 1928 // result: (ANDconst [c&(1<<16-1)] x) 1929 for { 1930 c := auxIntToInt64(v.AuxInt) 1931 if v_0.Op != OpARM64MOVHUreg { 1932 break 1933 } 1934 x := v_0.Args[0] 1935 v.reset(OpARM64ANDconst) 1936 v.AuxInt = int64ToAuxInt(c & (1<<16 - 1)) 1937 v.AddArg(x) 1938 return true 1939 } 1940 // match: (ANDconst [c] (MOVBUreg x)) 1941 // result: (ANDconst [c&(1<<8-1)] x) 1942 for { 1943 c := auxIntToInt64(v.AuxInt) 1944 if v_0.Op != OpARM64MOVBUreg { 1945 break 1946 } 1947 x := v_0.Args[0] 1948 v.reset(OpARM64ANDconst) 1949 v.AuxInt = int64ToAuxInt(c & (1<<8 - 1)) 1950 v.AddArg(x) 1951 return true 1952 } 1953 // match: (ANDconst [ac] (SLLconst [sc] x)) 1954 // cond: isARM64BFMask(sc, ac, sc) 1955 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x) 1956 for { 1957 ac := auxIntToInt64(v.AuxInt) 1958 if v_0.Op != OpARM64SLLconst { 1959 break 1960 } 1961 sc := auxIntToInt64(v_0.AuxInt) 1962 x := v_0.Args[0] 1963 if !(isARM64BFMask(sc, ac, sc)) { 1964 break 1965 } 1966 v.reset(OpARM64UBFIZ) 1967 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc))) 1968 v.AddArg(x) 1969 return true 1970 } 1971 // match: (ANDconst [ac] (SRLconst [sc] x)) 1972 // cond: isARM64BFMask(sc, ac, 0) 1973 // result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x) 1974 for { 1975 ac := auxIntToInt64(v.AuxInt) 1976 if v_0.Op != OpARM64SRLconst { 1977 break 1978 } 1979 sc := auxIntToInt64(v_0.AuxInt) 1980 x := v_0.Args[0] 1981 if !(isARM64BFMask(sc, ac, 0)) { 1982 break 1983 } 1984 v.reset(OpARM64UBFX) 1985 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0))) 1986 v.AddArg(x) 1987 return true 1988 } 1989 // match: (ANDconst [c] (UBFX [bfc] x)) 1990 // cond: isARM64BFMask(0, c, 0) 1991 // result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0)))] x) 1992 for { 1993 c := auxIntToInt64(v.AuxInt) 1994 if v_0.Op != OpARM64UBFX { 1995 break 1996 } 1997 bfc := auxIntToArm64BitField(v_0.AuxInt) 
1998 x := v_0.Args[0] 1999 if !(isARM64BFMask(0, c, 0)) { 2000 break 2001 } 2002 v.reset(OpARM64UBFX) 2003 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0)))) 2004 v.AddArg(x) 2005 return true 2006 } 2007 return false 2008 } 2009 func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool { 2010 v_1 := v.Args[1] 2011 v_0 := v.Args[0] 2012 b := v.Block 2013 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 2014 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 2015 for { 2016 d := auxIntToInt64(v.AuxInt) 2017 if v_0.Op != OpARM64MOVDconst { 2018 break 2019 } 2020 c := auxIntToInt64(v_0.AuxInt) 2021 x := v_1 2022 v.reset(OpARM64ANDconst) 2023 v.AuxInt = int64ToAuxInt(c) 2024 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2025 v0.AuxInt = int64ToAuxInt(d) 2026 v0.AddArg(x) 2027 v.AddArg(v0) 2028 return true 2029 } 2030 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 2031 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 2032 for { 2033 d := auxIntToInt64(v.AuxInt) 2034 x := v_0 2035 if v_1.Op != OpARM64MOVDconst { 2036 break 2037 } 2038 c := auxIntToInt64(v_1.AuxInt) 2039 v.reset(OpARM64ANDconst) 2040 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 2041 v.AddArg(x) 2042 return true 2043 } 2044 // match: (ANDshiftLL y:(SLLconst x [c]) x [c]) 2045 // result: y 2046 for { 2047 c := auxIntToInt64(v.AuxInt) 2048 y := v_0 2049 if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c { 2050 break 2051 } 2052 x := y.Args[0] 2053 if x != v_1 { 2054 break 2055 } 2056 v.copyOf(y) 2057 return true 2058 } 2059 return false 2060 } 2061 func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool { 2062 v_1 := v.Args[1] 2063 v_0 := v.Args[0] 2064 b := v.Block 2065 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 2066 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 2067 for { 2068 d := auxIntToInt64(v.AuxInt) 2069 if v_0.Op != OpARM64MOVDconst { 2070 break 2071 } 2072 c := auxIntToInt64(v_0.AuxInt) 2073 x := v_1 2074 
v.reset(OpARM64ANDconst) 2075 v.AuxInt = int64ToAuxInt(c) 2076 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2077 v0.AuxInt = int64ToAuxInt(d) 2078 v0.AddArg(x) 2079 v.AddArg(v0) 2080 return true 2081 } 2082 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 2083 // result: (ANDconst x [c>>uint64(d)]) 2084 for { 2085 d := auxIntToInt64(v.AuxInt) 2086 x := v_0 2087 if v_1.Op != OpARM64MOVDconst { 2088 break 2089 } 2090 c := auxIntToInt64(v_1.AuxInt) 2091 v.reset(OpARM64ANDconst) 2092 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 2093 v.AddArg(x) 2094 return true 2095 } 2096 // match: (ANDshiftRA y:(SRAconst x [c]) x [c]) 2097 // result: y 2098 for { 2099 c := auxIntToInt64(v.AuxInt) 2100 y := v_0 2101 if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c { 2102 break 2103 } 2104 x := y.Args[0] 2105 if x != v_1 { 2106 break 2107 } 2108 v.copyOf(y) 2109 return true 2110 } 2111 return false 2112 } 2113 func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool { 2114 v_1 := v.Args[1] 2115 v_0 := v.Args[0] 2116 b := v.Block 2117 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 2118 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 2119 for { 2120 d := auxIntToInt64(v.AuxInt) 2121 if v_0.Op != OpARM64MOVDconst { 2122 break 2123 } 2124 c := auxIntToInt64(v_0.AuxInt) 2125 x := v_1 2126 v.reset(OpARM64ANDconst) 2127 v.AuxInt = int64ToAuxInt(c) 2128 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2129 v0.AuxInt = int64ToAuxInt(d) 2130 v0.AddArg(x) 2131 v.AddArg(v0) 2132 return true 2133 } 2134 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 2135 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 2136 for { 2137 d := auxIntToInt64(v.AuxInt) 2138 x := v_0 2139 if v_1.Op != OpARM64MOVDconst { 2140 break 2141 } 2142 c := auxIntToInt64(v_1.AuxInt) 2143 v.reset(OpARM64ANDconst) 2144 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 2145 v.AddArg(x) 2146 return true 2147 } 2148 // match: (ANDshiftRL y:(SRLconst x [c]) x [c]) 2149 // result: y 2150 for { 2151 c := 
auxIntToInt64(v.AuxInt) 2152 y := v_0 2153 if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c { 2154 break 2155 } 2156 x := y.Args[0] 2157 if x != v_1 { 2158 break 2159 } 2160 v.copyOf(y) 2161 return true 2162 } 2163 return false 2164 } 2165 func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool { 2166 v_1 := v.Args[1] 2167 v_0 := v.Args[0] 2168 b := v.Block 2169 // match: (ANDshiftRO (MOVDconst [c]) x [d]) 2170 // result: (ANDconst [c] (RORconst <x.Type> x [d])) 2171 for { 2172 d := auxIntToInt64(v.AuxInt) 2173 if v_0.Op != OpARM64MOVDconst { 2174 break 2175 } 2176 c := auxIntToInt64(v_0.AuxInt) 2177 x := v_1 2178 v.reset(OpARM64ANDconst) 2179 v.AuxInt = int64ToAuxInt(c) 2180 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type) 2181 v0.AuxInt = int64ToAuxInt(d) 2182 v0.AddArg(x) 2183 v.AddArg(v0) 2184 return true 2185 } 2186 // match: (ANDshiftRO x (MOVDconst [c]) [d]) 2187 // result: (ANDconst x [rotateRight64(c, d)]) 2188 for { 2189 d := auxIntToInt64(v.AuxInt) 2190 x := v_0 2191 if v_1.Op != OpARM64MOVDconst { 2192 break 2193 } 2194 c := auxIntToInt64(v_1.AuxInt) 2195 v.reset(OpARM64ANDconst) 2196 v.AuxInt = int64ToAuxInt(rotateRight64(c, d)) 2197 v.AddArg(x) 2198 return true 2199 } 2200 // match: (ANDshiftRO y:(RORconst x [c]) x [c]) 2201 // result: y 2202 for { 2203 c := auxIntToInt64(v.AuxInt) 2204 y := v_0 2205 if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c { 2206 break 2207 } 2208 x := y.Args[0] 2209 if x != v_1 { 2210 break 2211 } 2212 v.copyOf(y) 2213 return true 2214 } 2215 return false 2216 } 2217 func rewriteValueARM64_OpARM64BIC(v *Value) bool { 2218 v_1 := v.Args[1] 2219 v_0 := v.Args[0] 2220 // match: (BIC x (MOVDconst [c])) 2221 // result: (ANDconst [^c] x) 2222 for { 2223 x := v_0 2224 if v_1.Op != OpARM64MOVDconst { 2225 break 2226 } 2227 c := auxIntToInt64(v_1.AuxInt) 2228 v.reset(OpARM64ANDconst) 2229 v.AuxInt = int64ToAuxInt(^c) 2230 v.AddArg(x) 2231 return true 2232 } 2233 // match: (BIC x x) 2234 // result: (MOVDconst [0]) 
2235 for { 2236 x := v_0 2237 if x != v_1 { 2238 break 2239 } 2240 v.reset(OpARM64MOVDconst) 2241 v.AuxInt = int64ToAuxInt(0) 2242 return true 2243 } 2244 // match: (BIC x0 x1:(SLLconst [c] y)) 2245 // cond: clobberIfDead(x1) 2246 // result: (BICshiftLL x0 y [c]) 2247 for { 2248 x0 := v_0 2249 x1 := v_1 2250 if x1.Op != OpARM64SLLconst { 2251 break 2252 } 2253 c := auxIntToInt64(x1.AuxInt) 2254 y := x1.Args[0] 2255 if !(clobberIfDead(x1)) { 2256 break 2257 } 2258 v.reset(OpARM64BICshiftLL) 2259 v.AuxInt = int64ToAuxInt(c) 2260 v.AddArg2(x0, y) 2261 return true 2262 } 2263 // match: (BIC x0 x1:(SRLconst [c] y)) 2264 // cond: clobberIfDead(x1) 2265 // result: (BICshiftRL x0 y [c]) 2266 for { 2267 x0 := v_0 2268 x1 := v_1 2269 if x1.Op != OpARM64SRLconst { 2270 break 2271 } 2272 c := auxIntToInt64(x1.AuxInt) 2273 y := x1.Args[0] 2274 if !(clobberIfDead(x1)) { 2275 break 2276 } 2277 v.reset(OpARM64BICshiftRL) 2278 v.AuxInt = int64ToAuxInt(c) 2279 v.AddArg2(x0, y) 2280 return true 2281 } 2282 // match: (BIC x0 x1:(SRAconst [c] y)) 2283 // cond: clobberIfDead(x1) 2284 // result: (BICshiftRA x0 y [c]) 2285 for { 2286 x0 := v_0 2287 x1 := v_1 2288 if x1.Op != OpARM64SRAconst { 2289 break 2290 } 2291 c := auxIntToInt64(x1.AuxInt) 2292 y := x1.Args[0] 2293 if !(clobberIfDead(x1)) { 2294 break 2295 } 2296 v.reset(OpARM64BICshiftRA) 2297 v.AuxInt = int64ToAuxInt(c) 2298 v.AddArg2(x0, y) 2299 return true 2300 } 2301 // match: (BIC x0 x1:(RORconst [c] y)) 2302 // cond: clobberIfDead(x1) 2303 // result: (BICshiftRO x0 y [c]) 2304 for { 2305 x0 := v_0 2306 x1 := v_1 2307 if x1.Op != OpARM64RORconst { 2308 break 2309 } 2310 c := auxIntToInt64(x1.AuxInt) 2311 y := x1.Args[0] 2312 if !(clobberIfDead(x1)) { 2313 break 2314 } 2315 v.reset(OpARM64BICshiftRO) 2316 v.AuxInt = int64ToAuxInt(c) 2317 v.AddArg2(x0, y) 2318 return true 2319 } 2320 return false 2321 } 2322 func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool { 2323 v_1 := v.Args[1] 2324 v_0 := v.Args[0] 2325 // match: 
(BICshiftLL x (MOVDconst [c]) [d]) 2326 // result: (ANDconst x [^int64(uint64(c)<<uint64(d))]) 2327 for { 2328 d := auxIntToInt64(v.AuxInt) 2329 x := v_0 2330 if v_1.Op != OpARM64MOVDconst { 2331 break 2332 } 2333 c := auxIntToInt64(v_1.AuxInt) 2334 v.reset(OpARM64ANDconst) 2335 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d))) 2336 v.AddArg(x) 2337 return true 2338 } 2339 // match: (BICshiftLL (SLLconst x [c]) x [c]) 2340 // result: (MOVDconst [0]) 2341 for { 2342 c := auxIntToInt64(v.AuxInt) 2343 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c { 2344 break 2345 } 2346 x := v_0.Args[0] 2347 if x != v_1 { 2348 break 2349 } 2350 v.reset(OpARM64MOVDconst) 2351 v.AuxInt = int64ToAuxInt(0) 2352 return true 2353 } 2354 return false 2355 } 2356 func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool { 2357 v_1 := v.Args[1] 2358 v_0 := v.Args[0] 2359 // match: (BICshiftRA x (MOVDconst [c]) [d]) 2360 // result: (ANDconst x [^(c>>uint64(d))]) 2361 for { 2362 d := auxIntToInt64(v.AuxInt) 2363 x := v_0 2364 if v_1.Op != OpARM64MOVDconst { 2365 break 2366 } 2367 c := auxIntToInt64(v_1.AuxInt) 2368 v.reset(OpARM64ANDconst) 2369 v.AuxInt = int64ToAuxInt(^(c >> uint64(d))) 2370 v.AddArg(x) 2371 return true 2372 } 2373 // match: (BICshiftRA (SRAconst x [c]) x [c]) 2374 // result: (MOVDconst [0]) 2375 for { 2376 c := auxIntToInt64(v.AuxInt) 2377 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c { 2378 break 2379 } 2380 x := v_0.Args[0] 2381 if x != v_1 { 2382 break 2383 } 2384 v.reset(OpARM64MOVDconst) 2385 v.AuxInt = int64ToAuxInt(0) 2386 return true 2387 } 2388 return false 2389 } 2390 func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool { 2391 v_1 := v.Args[1] 2392 v_0 := v.Args[0] 2393 // match: (BICshiftRL x (MOVDconst [c]) [d]) 2394 // result: (ANDconst x [^int64(uint64(c)>>uint64(d))]) 2395 for { 2396 d := auxIntToInt64(v.AuxInt) 2397 x := v_0 2398 if v_1.Op != OpARM64MOVDconst { 2399 break 2400 } 2401 c := auxIntToInt64(v_1.AuxInt) 2402 
v.reset(OpARM64ANDconst) 2403 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d))) 2404 v.AddArg(x) 2405 return true 2406 } 2407 // match: (BICshiftRL (SRLconst x [c]) x [c]) 2408 // result: (MOVDconst [0]) 2409 for { 2410 c := auxIntToInt64(v.AuxInt) 2411 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c { 2412 break 2413 } 2414 x := v_0.Args[0] 2415 if x != v_1 { 2416 break 2417 } 2418 v.reset(OpARM64MOVDconst) 2419 v.AuxInt = int64ToAuxInt(0) 2420 return true 2421 } 2422 return false 2423 } 2424 func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool { 2425 v_1 := v.Args[1] 2426 v_0 := v.Args[0] 2427 // match: (BICshiftRO x (MOVDconst [c]) [d]) 2428 // result: (ANDconst x [^rotateRight64(c, d)]) 2429 for { 2430 d := auxIntToInt64(v.AuxInt) 2431 x := v_0 2432 if v_1.Op != OpARM64MOVDconst { 2433 break 2434 } 2435 c := auxIntToInt64(v_1.AuxInt) 2436 v.reset(OpARM64ANDconst) 2437 v.AuxInt = int64ToAuxInt(^rotateRight64(c, d)) 2438 v.AddArg(x) 2439 return true 2440 } 2441 // match: (BICshiftRO (RORconst x [c]) x [c]) 2442 // result: (MOVDconst [0]) 2443 for { 2444 c := auxIntToInt64(v.AuxInt) 2445 if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c { 2446 break 2447 } 2448 x := v_0.Args[0] 2449 if x != v_1 { 2450 break 2451 } 2452 v.reset(OpARM64MOVDconst) 2453 v.AuxInt = int64ToAuxInt(0) 2454 return true 2455 } 2456 return false 2457 } 2458 func rewriteValueARM64_OpARM64CMN(v *Value) bool { 2459 v_1 := v.Args[1] 2460 v_0 := v.Args[0] 2461 // match: (CMN x (MOVDconst [c])) 2462 // result: (CMNconst [c] x) 2463 for { 2464 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2465 x := v_0 2466 if v_1.Op != OpARM64MOVDconst { 2467 continue 2468 } 2469 c := auxIntToInt64(v_1.AuxInt) 2470 v.reset(OpARM64CMNconst) 2471 v.AuxInt = int64ToAuxInt(c) 2472 v.AddArg(x) 2473 return true 2474 } 2475 break 2476 } 2477 // match: (CMN x0 x1:(SLLconst [c] y)) 2478 // cond: clobberIfDead(x1) 2479 // result: (CMNshiftLL x0 y [c]) 2480 for { 2481 for _i0 
:= 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2482 x0 := v_0 2483 x1 := v_1 2484 if x1.Op != OpARM64SLLconst { 2485 continue 2486 } 2487 c := auxIntToInt64(x1.AuxInt) 2488 y := x1.Args[0] 2489 if !(clobberIfDead(x1)) { 2490 continue 2491 } 2492 v.reset(OpARM64CMNshiftLL) 2493 v.AuxInt = int64ToAuxInt(c) 2494 v.AddArg2(x0, y) 2495 return true 2496 } 2497 break 2498 } 2499 // match: (CMN x0 x1:(SRLconst [c] y)) 2500 // cond: clobberIfDead(x1) 2501 // result: (CMNshiftRL x0 y [c]) 2502 for { 2503 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2504 x0 := v_0 2505 x1 := v_1 2506 if x1.Op != OpARM64SRLconst { 2507 continue 2508 } 2509 c := auxIntToInt64(x1.AuxInt) 2510 y := x1.Args[0] 2511 if !(clobberIfDead(x1)) { 2512 continue 2513 } 2514 v.reset(OpARM64CMNshiftRL) 2515 v.AuxInt = int64ToAuxInt(c) 2516 v.AddArg2(x0, y) 2517 return true 2518 } 2519 break 2520 } 2521 // match: (CMN x0 x1:(SRAconst [c] y)) 2522 // cond: clobberIfDead(x1) 2523 // result: (CMNshiftRA x0 y [c]) 2524 for { 2525 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2526 x0 := v_0 2527 x1 := v_1 2528 if x1.Op != OpARM64SRAconst { 2529 continue 2530 } 2531 c := auxIntToInt64(x1.AuxInt) 2532 y := x1.Args[0] 2533 if !(clobberIfDead(x1)) { 2534 continue 2535 } 2536 v.reset(OpARM64CMNshiftRA) 2537 v.AuxInt = int64ToAuxInt(c) 2538 v.AddArg2(x0, y) 2539 return true 2540 } 2541 break 2542 } 2543 return false 2544 } 2545 func rewriteValueARM64_OpARM64CMNW(v *Value) bool { 2546 v_1 := v.Args[1] 2547 v_0 := v.Args[0] 2548 // match: (CMNW x (MOVDconst [c])) 2549 // result: (CMNWconst [int32(c)] x) 2550 for { 2551 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2552 x := v_0 2553 if v_1.Op != OpARM64MOVDconst { 2554 continue 2555 } 2556 c := auxIntToInt64(v_1.AuxInt) 2557 v.reset(OpARM64CMNWconst) 2558 v.AuxInt = int32ToAuxInt(int32(c)) 2559 v.AddArg(x) 2560 return true 2561 } 2562 break 2563 } 2564 return false 2565 } 2566 func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool 
{ 2567 v_0 := v.Args[0] 2568 // match: (CMNWconst [c] y) 2569 // cond: c < 0 && c != -1<<31 2570 // result: (CMPWconst [-c] y) 2571 for { 2572 c := auxIntToInt32(v.AuxInt) 2573 y := v_0 2574 if !(c < 0 && c != -1<<31) { 2575 break 2576 } 2577 v.reset(OpARM64CMPWconst) 2578 v.AuxInt = int32ToAuxInt(-c) 2579 v.AddArg(y) 2580 return true 2581 } 2582 // match: (CMNWconst (MOVDconst [x]) [y]) 2583 // result: (FlagConstant [addFlags32(int32(x),y)]) 2584 for { 2585 y := auxIntToInt32(v.AuxInt) 2586 if v_0.Op != OpARM64MOVDconst { 2587 break 2588 } 2589 x := auxIntToInt64(v_0.AuxInt) 2590 v.reset(OpARM64FlagConstant) 2591 v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y)) 2592 return true 2593 } 2594 return false 2595 } 2596 func rewriteValueARM64_OpARM64CMNconst(v *Value) bool { 2597 v_0 := v.Args[0] 2598 // match: (CMNconst [c] y) 2599 // cond: c < 0 && c != -1<<63 2600 // result: (CMPconst [-c] y) 2601 for { 2602 c := auxIntToInt64(v.AuxInt) 2603 y := v_0 2604 if !(c < 0 && c != -1<<63) { 2605 break 2606 } 2607 v.reset(OpARM64CMPconst) 2608 v.AuxInt = int64ToAuxInt(-c) 2609 v.AddArg(y) 2610 return true 2611 } 2612 // match: (CMNconst (MOVDconst [x]) [y]) 2613 // result: (FlagConstant [addFlags64(x,y)]) 2614 for { 2615 y := auxIntToInt64(v.AuxInt) 2616 if v_0.Op != OpARM64MOVDconst { 2617 break 2618 } 2619 x := auxIntToInt64(v_0.AuxInt) 2620 v.reset(OpARM64FlagConstant) 2621 v.AuxInt = flagConstantToAuxInt(addFlags64(x, y)) 2622 return true 2623 } 2624 return false 2625 } 2626 func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool { 2627 v_1 := v.Args[1] 2628 v_0 := v.Args[0] 2629 b := v.Block 2630 // match: (CMNshiftLL (MOVDconst [c]) x [d]) 2631 // result: (CMNconst [c] (SLLconst <x.Type> x [d])) 2632 for { 2633 d := auxIntToInt64(v.AuxInt) 2634 if v_0.Op != OpARM64MOVDconst { 2635 break 2636 } 2637 c := auxIntToInt64(v_0.AuxInt) 2638 x := v_1 2639 v.reset(OpARM64CMNconst) 2640 v.AuxInt = int64ToAuxInt(c) 2641 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 
2642 v0.AuxInt = int64ToAuxInt(d) 2643 v0.AddArg(x) 2644 v.AddArg(v0) 2645 return true 2646 } 2647 // match: (CMNshiftLL x (MOVDconst [c]) [d]) 2648 // result: (CMNconst x [int64(uint64(c)<<uint64(d))]) 2649 for { 2650 d := auxIntToInt64(v.AuxInt) 2651 x := v_0 2652 if v_1.Op != OpARM64MOVDconst { 2653 break 2654 } 2655 c := auxIntToInt64(v_1.AuxInt) 2656 v.reset(OpARM64CMNconst) 2657 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 2658 v.AddArg(x) 2659 return true 2660 } 2661 return false 2662 } 2663 func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool { 2664 v_1 := v.Args[1] 2665 v_0 := v.Args[0] 2666 b := v.Block 2667 // match: (CMNshiftRA (MOVDconst [c]) x [d]) 2668 // result: (CMNconst [c] (SRAconst <x.Type> x [d])) 2669 for { 2670 d := auxIntToInt64(v.AuxInt) 2671 if v_0.Op != OpARM64MOVDconst { 2672 break 2673 } 2674 c := auxIntToInt64(v_0.AuxInt) 2675 x := v_1 2676 v.reset(OpARM64CMNconst) 2677 v.AuxInt = int64ToAuxInt(c) 2678 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2679 v0.AuxInt = int64ToAuxInt(d) 2680 v0.AddArg(x) 2681 v.AddArg(v0) 2682 return true 2683 } 2684 // match: (CMNshiftRA x (MOVDconst [c]) [d]) 2685 // result: (CMNconst x [c>>uint64(d)]) 2686 for { 2687 d := auxIntToInt64(v.AuxInt) 2688 x := v_0 2689 if v_1.Op != OpARM64MOVDconst { 2690 break 2691 } 2692 c := auxIntToInt64(v_1.AuxInt) 2693 v.reset(OpARM64CMNconst) 2694 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 2695 v.AddArg(x) 2696 return true 2697 } 2698 return false 2699 } 2700 func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool { 2701 v_1 := v.Args[1] 2702 v_0 := v.Args[0] 2703 b := v.Block 2704 // match: (CMNshiftRL (MOVDconst [c]) x [d]) 2705 // result: (CMNconst [c] (SRLconst <x.Type> x [d])) 2706 for { 2707 d := auxIntToInt64(v.AuxInt) 2708 if v_0.Op != OpARM64MOVDconst { 2709 break 2710 } 2711 c := auxIntToInt64(v_0.AuxInt) 2712 x := v_1 2713 v.reset(OpARM64CMNconst) 2714 v.AuxInt = int64ToAuxInt(c) 2715 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2716 
v0.AuxInt = int64ToAuxInt(d) 2717 v0.AddArg(x) 2718 v.AddArg(v0) 2719 return true 2720 } 2721 // match: (CMNshiftRL x (MOVDconst [c]) [d]) 2722 // result: (CMNconst x [int64(uint64(c)>>uint64(d))]) 2723 for { 2724 d := auxIntToInt64(v.AuxInt) 2725 x := v_0 2726 if v_1.Op != OpARM64MOVDconst { 2727 break 2728 } 2729 c := auxIntToInt64(v_1.AuxInt) 2730 v.reset(OpARM64CMNconst) 2731 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 2732 v.AddArg(x) 2733 return true 2734 } 2735 return false 2736 } 2737 func rewriteValueARM64_OpARM64CMP(v *Value) bool { 2738 v_1 := v.Args[1] 2739 v_0 := v.Args[0] 2740 b := v.Block 2741 // match: (CMP x (MOVDconst [c])) 2742 // result: (CMPconst [c] x) 2743 for { 2744 x := v_0 2745 if v_1.Op != OpARM64MOVDconst { 2746 break 2747 } 2748 c := auxIntToInt64(v_1.AuxInt) 2749 v.reset(OpARM64CMPconst) 2750 v.AuxInt = int64ToAuxInt(c) 2751 v.AddArg(x) 2752 return true 2753 } 2754 // match: (CMP (MOVDconst [c]) x) 2755 // result: (InvertFlags (CMPconst [c] x)) 2756 for { 2757 if v_0.Op != OpARM64MOVDconst { 2758 break 2759 } 2760 c := auxIntToInt64(v_0.AuxInt) 2761 x := v_1 2762 v.reset(OpARM64InvertFlags) 2763 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2764 v0.AuxInt = int64ToAuxInt(c) 2765 v0.AddArg(x) 2766 v.AddArg(v0) 2767 return true 2768 } 2769 // match: (CMP x y) 2770 // cond: canonLessThan(x,y) 2771 // result: (InvertFlags (CMP y x)) 2772 for { 2773 x := v_0 2774 y := v_1 2775 if !(canonLessThan(x, y)) { 2776 break 2777 } 2778 v.reset(OpARM64InvertFlags) 2779 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 2780 v0.AddArg2(y, x) 2781 v.AddArg(v0) 2782 return true 2783 } 2784 // match: (CMP x0 x1:(SLLconst [c] y)) 2785 // cond: clobberIfDead(x1) 2786 // result: (CMPshiftLL x0 y [c]) 2787 for { 2788 x0 := v_0 2789 x1 := v_1 2790 if x1.Op != OpARM64SLLconst { 2791 break 2792 } 2793 c := auxIntToInt64(x1.AuxInt) 2794 y := x1.Args[0] 2795 if !(clobberIfDead(x1)) { 2796 break 2797 } 2798 v.reset(OpARM64CMPshiftLL) 
2799 v.AuxInt = int64ToAuxInt(c) 2800 v.AddArg2(x0, y) 2801 return true 2802 } 2803 // match: (CMP x0:(SLLconst [c] y) x1) 2804 // cond: clobberIfDead(x0) 2805 // result: (InvertFlags (CMPshiftLL x1 y [c])) 2806 for { 2807 x0 := v_0 2808 if x0.Op != OpARM64SLLconst { 2809 break 2810 } 2811 c := auxIntToInt64(x0.AuxInt) 2812 y := x0.Args[0] 2813 x1 := v_1 2814 if !(clobberIfDead(x0)) { 2815 break 2816 } 2817 v.reset(OpARM64InvertFlags) 2818 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 2819 v0.AuxInt = int64ToAuxInt(c) 2820 v0.AddArg2(x1, y) 2821 v.AddArg(v0) 2822 return true 2823 } 2824 // match: (CMP x0 x1:(SRLconst [c] y)) 2825 // cond: clobberIfDead(x1) 2826 // result: (CMPshiftRL x0 y [c]) 2827 for { 2828 x0 := v_0 2829 x1 := v_1 2830 if x1.Op != OpARM64SRLconst { 2831 break 2832 } 2833 c := auxIntToInt64(x1.AuxInt) 2834 y := x1.Args[0] 2835 if !(clobberIfDead(x1)) { 2836 break 2837 } 2838 v.reset(OpARM64CMPshiftRL) 2839 v.AuxInt = int64ToAuxInt(c) 2840 v.AddArg2(x0, y) 2841 return true 2842 } 2843 // match: (CMP x0:(SRLconst [c] y) x1) 2844 // cond: clobberIfDead(x0) 2845 // result: (InvertFlags (CMPshiftRL x1 y [c])) 2846 for { 2847 x0 := v_0 2848 if x0.Op != OpARM64SRLconst { 2849 break 2850 } 2851 c := auxIntToInt64(x0.AuxInt) 2852 y := x0.Args[0] 2853 x1 := v_1 2854 if !(clobberIfDead(x0)) { 2855 break 2856 } 2857 v.reset(OpARM64InvertFlags) 2858 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 2859 v0.AuxInt = int64ToAuxInt(c) 2860 v0.AddArg2(x1, y) 2861 v.AddArg(v0) 2862 return true 2863 } 2864 // match: (CMP x0 x1:(SRAconst [c] y)) 2865 // cond: clobberIfDead(x1) 2866 // result: (CMPshiftRA x0 y [c]) 2867 for { 2868 x0 := v_0 2869 x1 := v_1 2870 if x1.Op != OpARM64SRAconst { 2871 break 2872 } 2873 c := auxIntToInt64(x1.AuxInt) 2874 y := x1.Args[0] 2875 if !(clobberIfDead(x1)) { 2876 break 2877 } 2878 v.reset(OpARM64CMPshiftRA) 2879 v.AuxInt = int64ToAuxInt(c) 2880 v.AddArg2(x0, y) 2881 return true 2882 } 2883 // match: (CMP 
x0:(SRAconst [c] y) x1) 2884 // cond: clobberIfDead(x0) 2885 // result: (InvertFlags (CMPshiftRA x1 y [c])) 2886 for { 2887 x0 := v_0 2888 if x0.Op != OpARM64SRAconst { 2889 break 2890 } 2891 c := auxIntToInt64(x0.AuxInt) 2892 y := x0.Args[0] 2893 x1 := v_1 2894 if !(clobberIfDead(x0)) { 2895 break 2896 } 2897 v.reset(OpARM64InvertFlags) 2898 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 2899 v0.AuxInt = int64ToAuxInt(c) 2900 v0.AddArg2(x1, y) 2901 v.AddArg(v0) 2902 return true 2903 } 2904 return false 2905 } 2906 func rewriteValueARM64_OpARM64CMPW(v *Value) bool { 2907 v_1 := v.Args[1] 2908 v_0 := v.Args[0] 2909 b := v.Block 2910 // match: (CMPW x (MOVDconst [c])) 2911 // result: (CMPWconst [int32(c)] x) 2912 for { 2913 x := v_0 2914 if v_1.Op != OpARM64MOVDconst { 2915 break 2916 } 2917 c := auxIntToInt64(v_1.AuxInt) 2918 v.reset(OpARM64CMPWconst) 2919 v.AuxInt = int32ToAuxInt(int32(c)) 2920 v.AddArg(x) 2921 return true 2922 } 2923 // match: (CMPW (MOVDconst [c]) x) 2924 // result: (InvertFlags (CMPWconst [int32(c)] x)) 2925 for { 2926 if v_0.Op != OpARM64MOVDconst { 2927 break 2928 } 2929 c := auxIntToInt64(v_0.AuxInt) 2930 x := v_1 2931 v.reset(OpARM64InvertFlags) 2932 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 2933 v0.AuxInt = int32ToAuxInt(int32(c)) 2934 v0.AddArg(x) 2935 v.AddArg(v0) 2936 return true 2937 } 2938 // match: (CMPW x y) 2939 // cond: canonLessThan(x,y) 2940 // result: (InvertFlags (CMPW y x)) 2941 for { 2942 x := v_0 2943 y := v_1 2944 if !(canonLessThan(x, y)) { 2945 break 2946 } 2947 v.reset(OpARM64InvertFlags) 2948 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 2949 v0.AddArg2(y, x) 2950 v.AddArg(v0) 2951 return true 2952 } 2953 return false 2954 } 2955 func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool { 2956 v_0 := v.Args[0] 2957 // match: (CMPWconst [c] y) 2958 // cond: c < 0 && c != -1<<31 2959 // result: (CMNWconst [-c] y) 2960 for { 2961 c := auxIntToInt32(v.AuxInt) 2962 y := v_0 2963 if !(c < 
0 && c != -1<<31) { 2964 break 2965 } 2966 v.reset(OpARM64CMNWconst) 2967 v.AuxInt = int32ToAuxInt(-c) 2968 v.AddArg(y) 2969 return true 2970 } 2971 // match: (CMPWconst (MOVDconst [x]) [y]) 2972 // result: (FlagConstant [subFlags32(int32(x),y)]) 2973 for { 2974 y := auxIntToInt32(v.AuxInt) 2975 if v_0.Op != OpARM64MOVDconst { 2976 break 2977 } 2978 x := auxIntToInt64(v_0.AuxInt) 2979 v.reset(OpARM64FlagConstant) 2980 v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y)) 2981 return true 2982 } 2983 // match: (CMPWconst (MOVBUreg _) [c]) 2984 // cond: 0xff < c 2985 // result: (FlagConstant [subFlags64(0,1)]) 2986 for { 2987 c := auxIntToInt32(v.AuxInt) 2988 if v_0.Op != OpARM64MOVBUreg || !(0xff < c) { 2989 break 2990 } 2991 v.reset(OpARM64FlagConstant) 2992 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 2993 return true 2994 } 2995 // match: (CMPWconst (MOVHUreg _) [c]) 2996 // cond: 0xffff < c 2997 // result: (FlagConstant [subFlags64(0,1)]) 2998 for { 2999 c := auxIntToInt32(v.AuxInt) 3000 if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) { 3001 break 3002 } 3003 v.reset(OpARM64FlagConstant) 3004 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3005 return true 3006 } 3007 return false 3008 } 3009 func rewriteValueARM64_OpARM64CMPconst(v *Value) bool { 3010 v_0 := v.Args[0] 3011 // match: (CMPconst [c] y) 3012 // cond: c < 0 && c != -1<<63 3013 // result: (CMNconst [-c] y) 3014 for { 3015 c := auxIntToInt64(v.AuxInt) 3016 y := v_0 3017 if !(c < 0 && c != -1<<63) { 3018 break 3019 } 3020 v.reset(OpARM64CMNconst) 3021 v.AuxInt = int64ToAuxInt(-c) 3022 v.AddArg(y) 3023 return true 3024 } 3025 // match: (CMPconst (MOVDconst [x]) [y]) 3026 // result: (FlagConstant [subFlags64(x,y)]) 3027 for { 3028 y := auxIntToInt64(v.AuxInt) 3029 if v_0.Op != OpARM64MOVDconst { 3030 break 3031 } 3032 x := auxIntToInt64(v_0.AuxInt) 3033 v.reset(OpARM64FlagConstant) 3034 v.AuxInt = flagConstantToAuxInt(subFlags64(x, y)) 3035 return true 3036 } 3037 // match: (CMPconst (MOVBUreg _) 
[c]) 3038 // cond: 0xff < c 3039 // result: (FlagConstant [subFlags64(0,1)]) 3040 for { 3041 c := auxIntToInt64(v.AuxInt) 3042 if v_0.Op != OpARM64MOVBUreg || !(0xff < c) { 3043 break 3044 } 3045 v.reset(OpARM64FlagConstant) 3046 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3047 return true 3048 } 3049 // match: (CMPconst (MOVHUreg _) [c]) 3050 // cond: 0xffff < c 3051 // result: (FlagConstant [subFlags64(0,1)]) 3052 for { 3053 c := auxIntToInt64(v.AuxInt) 3054 if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) { 3055 break 3056 } 3057 v.reset(OpARM64FlagConstant) 3058 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3059 return true 3060 } 3061 // match: (CMPconst (MOVWUreg _) [c]) 3062 // cond: 0xffffffff < c 3063 // result: (FlagConstant [subFlags64(0,1)]) 3064 for { 3065 c := auxIntToInt64(v.AuxInt) 3066 if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) { 3067 break 3068 } 3069 v.reset(OpARM64FlagConstant) 3070 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3071 return true 3072 } 3073 // match: (CMPconst (ANDconst _ [m]) [n]) 3074 // cond: 0 <= m && m < n 3075 // result: (FlagConstant [subFlags64(0,1)]) 3076 for { 3077 n := auxIntToInt64(v.AuxInt) 3078 if v_0.Op != OpARM64ANDconst { 3079 break 3080 } 3081 m := auxIntToInt64(v_0.AuxInt) 3082 if !(0 <= m && m < n) { 3083 break 3084 } 3085 v.reset(OpARM64FlagConstant) 3086 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3087 return true 3088 } 3089 // match: (CMPconst (SRLconst _ [c]) [n]) 3090 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 3091 // result: (FlagConstant [subFlags64(0,1)]) 3092 for { 3093 n := auxIntToInt64(v.AuxInt) 3094 if v_0.Op != OpARM64SRLconst { 3095 break 3096 } 3097 c := auxIntToInt64(v_0.AuxInt) 3098 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 3099 break 3100 } 3101 v.reset(OpARM64FlagConstant) 3102 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1)) 3103 return true 3104 } 3105 return false 3106 } 3107 func 
rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool { 3108 v_1 := v.Args[1] 3109 v_0 := v.Args[0] 3110 b := v.Block 3111 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 3112 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 3113 for { 3114 d := auxIntToInt64(v.AuxInt) 3115 if v_0.Op != OpARM64MOVDconst { 3116 break 3117 } 3118 c := auxIntToInt64(v_0.AuxInt) 3119 x := v_1 3120 v.reset(OpARM64InvertFlags) 3121 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3122 v0.AuxInt = int64ToAuxInt(c) 3123 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 3124 v1.AuxInt = int64ToAuxInt(d) 3125 v1.AddArg(x) 3126 v0.AddArg(v1) 3127 v.AddArg(v0) 3128 return true 3129 } 3130 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 3131 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 3132 for { 3133 d := auxIntToInt64(v.AuxInt) 3134 x := v_0 3135 if v_1.Op != OpARM64MOVDconst { 3136 break 3137 } 3138 c := auxIntToInt64(v_1.AuxInt) 3139 v.reset(OpARM64CMPconst) 3140 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 3141 v.AddArg(x) 3142 return true 3143 } 3144 return false 3145 } 3146 func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool { 3147 v_1 := v.Args[1] 3148 v_0 := v.Args[0] 3149 b := v.Block 3150 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 3151 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 3152 for { 3153 d := auxIntToInt64(v.AuxInt) 3154 if v_0.Op != OpARM64MOVDconst { 3155 break 3156 } 3157 c := auxIntToInt64(v_0.AuxInt) 3158 x := v_1 3159 v.reset(OpARM64InvertFlags) 3160 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3161 v0.AuxInt = int64ToAuxInt(c) 3162 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 3163 v1.AuxInt = int64ToAuxInt(d) 3164 v1.AddArg(x) 3165 v0.AddArg(v1) 3166 v.AddArg(v0) 3167 return true 3168 } 3169 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 3170 // result: (CMPconst x [c>>uint64(d)]) 3171 for { 3172 d := auxIntToInt64(v.AuxInt) 3173 x := v_0 3174 if v_1.Op != OpARM64MOVDconst { 3175 break 
3176 } 3177 c := auxIntToInt64(v_1.AuxInt) 3178 v.reset(OpARM64CMPconst) 3179 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 3180 v.AddArg(x) 3181 return true 3182 } 3183 return false 3184 } 3185 func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool { 3186 v_1 := v.Args[1] 3187 v_0 := v.Args[0] 3188 b := v.Block 3189 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 3190 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 3191 for { 3192 d := auxIntToInt64(v.AuxInt) 3193 if v_0.Op != OpARM64MOVDconst { 3194 break 3195 } 3196 c := auxIntToInt64(v_0.AuxInt) 3197 x := v_1 3198 v.reset(OpARM64InvertFlags) 3199 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3200 v0.AuxInt = int64ToAuxInt(c) 3201 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3202 v1.AuxInt = int64ToAuxInt(d) 3203 v1.AddArg(x) 3204 v0.AddArg(v1) 3205 v.AddArg(v0) 3206 return true 3207 } 3208 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 3209 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 3210 for { 3211 d := auxIntToInt64(v.AuxInt) 3212 x := v_0 3213 if v_1.Op != OpARM64MOVDconst { 3214 break 3215 } 3216 c := auxIntToInt64(v_1.AuxInt) 3217 v.reset(OpARM64CMPconst) 3218 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 3219 v.AddArg(x) 3220 return true 3221 } 3222 return false 3223 } 3224 func rewriteValueARM64_OpARM64CSEL(v *Value) bool { 3225 v_2 := v.Args[2] 3226 v_1 := v.Args[1] 3227 v_0 := v.Args[0] 3228 // match: (CSEL [cc] (MOVDconst [-1]) (MOVDconst [0]) flag) 3229 // result: (CSETM [cc] flag) 3230 for { 3231 cc := auxIntToOp(v.AuxInt) 3232 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != -1 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 3233 break 3234 } 3235 flag := v_2 3236 v.reset(OpARM64CSETM) 3237 v.AuxInt = opToAuxInt(cc) 3238 v.AddArg(flag) 3239 return true 3240 } 3241 // match: (CSEL [cc] (MOVDconst [0]) (MOVDconst [-1]) flag) 3242 // result: (CSETM [arm64Negate(cc)] flag) 3243 for { 3244 cc := auxIntToOp(v.AuxInt) 3245 if 
v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 { 3246 break 3247 } 3248 flag := v_2 3249 v.reset(OpARM64CSETM) 3250 v.AuxInt = opToAuxInt(arm64Negate(cc)) 3251 v.AddArg(flag) 3252 return true 3253 } 3254 // match: (CSEL [cc] x (MOVDconst [0]) flag) 3255 // result: (CSEL0 [cc] x flag) 3256 for { 3257 cc := auxIntToOp(v.AuxInt) 3258 x := v_0 3259 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 3260 break 3261 } 3262 flag := v_2 3263 v.reset(OpARM64CSEL0) 3264 v.AuxInt = opToAuxInt(cc) 3265 v.AddArg2(x, flag) 3266 return true 3267 } 3268 // match: (CSEL [cc] (MOVDconst [0]) y flag) 3269 // result: (CSEL0 [arm64Negate(cc)] y flag) 3270 for { 3271 cc := auxIntToOp(v.AuxInt) 3272 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 { 3273 break 3274 } 3275 y := v_1 3276 flag := v_2 3277 v.reset(OpARM64CSEL0) 3278 v.AuxInt = opToAuxInt(arm64Negate(cc)) 3279 v.AddArg2(y, flag) 3280 return true 3281 } 3282 // match: (CSEL [cc] x (ADDconst [1] a) flag) 3283 // result: (CSINC [cc] x a flag) 3284 for { 3285 cc := auxIntToOp(v.AuxInt) 3286 x := v_0 3287 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 { 3288 break 3289 } 3290 a := v_1.Args[0] 3291 flag := v_2 3292 v.reset(OpARM64CSINC) 3293 v.AuxInt = opToAuxInt(cc) 3294 v.AddArg3(x, a, flag) 3295 return true 3296 } 3297 // match: (CSEL [cc] (ADDconst [1] a) x flag) 3298 // result: (CSINC [arm64Negate(cc)] x a flag) 3299 for { 3300 cc := auxIntToOp(v.AuxInt) 3301 if v_0.Op != OpARM64ADDconst || auxIntToInt64(v_0.AuxInt) != 1 { 3302 break 3303 } 3304 a := v_0.Args[0] 3305 x := v_1 3306 flag := v_2 3307 v.reset(OpARM64CSINC) 3308 v.AuxInt = opToAuxInt(arm64Negate(cc)) 3309 v.AddArg3(x, a, flag) 3310 return true 3311 } 3312 // match: (CSEL [cc] x (MVN a) flag) 3313 // result: (CSINV [cc] x a flag) 3314 for { 3315 cc := auxIntToOp(v.AuxInt) 3316 x := v_0 3317 if v_1.Op != OpARM64MVN { 3318 break 3319 } 3320 a := 
v_1.Args[0] 3321 flag := v_2 3322 v.reset(OpARM64CSINV) 3323 v.AuxInt = opToAuxInt(cc) 3324 v.AddArg3(x, a, flag) 3325 return true 3326 } 3327 // match: (CSEL [cc] (MVN a) x flag) 3328 // result: (CSINV [arm64Negate(cc)] x a flag) 3329 for { 3330 cc := auxIntToOp(v.AuxInt) 3331 if v_0.Op != OpARM64MVN { 3332 break 3333 } 3334 a := v_0.Args[0] 3335 x := v_1 3336 flag := v_2 3337 v.reset(OpARM64CSINV) 3338 v.AuxInt = opToAuxInt(arm64Negate(cc)) 3339 v.AddArg3(x, a, flag) 3340 return true 3341 } 3342 // match: (CSEL [cc] x (NEG a) flag) 3343 // result: (CSNEG [cc] x a flag) 3344 for { 3345 cc := auxIntToOp(v.AuxInt) 3346 x := v_0 3347 if v_1.Op != OpARM64NEG { 3348 break 3349 } 3350 a := v_1.Args[0] 3351 flag := v_2 3352 v.reset(OpARM64CSNEG) 3353 v.AuxInt = opToAuxInt(cc) 3354 v.AddArg3(x, a, flag) 3355 return true 3356 } 3357 // match: (CSEL [cc] (NEG a) x flag) 3358 // result: (CSNEG [arm64Negate(cc)] x a flag) 3359 for { 3360 cc := auxIntToOp(v.AuxInt) 3361 if v_0.Op != OpARM64NEG { 3362 break 3363 } 3364 a := v_0.Args[0] 3365 x := v_1 3366 flag := v_2 3367 v.reset(OpARM64CSNEG) 3368 v.AuxInt = opToAuxInt(arm64Negate(cc)) 3369 v.AddArg3(x, a, flag) 3370 return true 3371 } 3372 // match: (CSEL [cc] x y (InvertFlags cmp)) 3373 // result: (CSEL [arm64Invert(cc)] x y cmp) 3374 for { 3375 cc := auxIntToOp(v.AuxInt) 3376 x := v_0 3377 y := v_1 3378 if v_2.Op != OpARM64InvertFlags { 3379 break 3380 } 3381 cmp := v_2.Args[0] 3382 v.reset(OpARM64CSEL) 3383 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3384 v.AddArg3(x, y, cmp) 3385 return true 3386 } 3387 // match: (CSEL [cc] x _ flag) 3388 // cond: ccARM64Eval(cc, flag) > 0 3389 // result: x 3390 for { 3391 cc := auxIntToOp(v.AuxInt) 3392 x := v_0 3393 flag := v_2 3394 if !(ccARM64Eval(cc, flag) > 0) { 3395 break 3396 } 3397 v.copyOf(x) 3398 return true 3399 } 3400 // match: (CSEL [cc] _ y flag) 3401 // cond: ccARM64Eval(cc, flag) < 0 3402 // result: y 3403 for { 3404 cc := auxIntToOp(v.AuxInt) 3405 y := v_1 3406 flag := v_2 
3407 if !(ccARM64Eval(cc, flag) < 0) { 3408 break 3409 } 3410 v.copyOf(y) 3411 return true 3412 } 3413 // match: (CSEL [cc] x y (CMPWconst [0] boolval)) 3414 // cond: cc == OpARM64NotEqual && flagArg(boolval) != nil 3415 // result: (CSEL [boolval.Op] x y flagArg(boolval)) 3416 for { 3417 cc := auxIntToOp(v.AuxInt) 3418 x := v_0 3419 y := v_1 3420 if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 { 3421 break 3422 } 3423 boolval := v_2.Args[0] 3424 if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) { 3425 break 3426 } 3427 v.reset(OpARM64CSEL) 3428 v.AuxInt = opToAuxInt(boolval.Op) 3429 v.AddArg3(x, y, flagArg(boolval)) 3430 return true 3431 } 3432 // match: (CSEL [cc] x y (CMPWconst [0] boolval)) 3433 // cond: cc == OpARM64Equal && flagArg(boolval) != nil 3434 // result: (CSEL [arm64Negate(boolval.Op)] x y flagArg(boolval)) 3435 for { 3436 cc := auxIntToOp(v.AuxInt) 3437 x := v_0 3438 y := v_1 3439 if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 { 3440 break 3441 } 3442 boolval := v_2.Args[0] 3443 if !(cc == OpARM64Equal && flagArg(boolval) != nil) { 3444 break 3445 } 3446 v.reset(OpARM64CSEL) 3447 v.AuxInt = opToAuxInt(arm64Negate(boolval.Op)) 3448 v.AddArg3(x, y, flagArg(boolval)) 3449 return true 3450 } 3451 return false 3452 } 3453 func rewriteValueARM64_OpARM64CSEL0(v *Value) bool { 3454 v_1 := v.Args[1] 3455 v_0 := v.Args[0] 3456 // match: (CSEL0 [cc] x (InvertFlags cmp)) 3457 // result: (CSEL0 [arm64Invert(cc)] x cmp) 3458 for { 3459 cc := auxIntToOp(v.AuxInt) 3460 x := v_0 3461 if v_1.Op != OpARM64InvertFlags { 3462 break 3463 } 3464 cmp := v_1.Args[0] 3465 v.reset(OpARM64CSEL0) 3466 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3467 v.AddArg2(x, cmp) 3468 return true 3469 } 3470 // match: (CSEL0 [cc] x flag) 3471 // cond: ccARM64Eval(cc, flag) > 0 3472 // result: x 3473 for { 3474 cc := auxIntToOp(v.AuxInt) 3475 x := v_0 3476 flag := v_1 3477 if !(ccARM64Eval(cc, flag) > 0) { 3478 break 3479 } 3480 v.copyOf(x) 3481 return true 
3482 } 3483 // match: (CSEL0 [cc] _ flag) 3484 // cond: ccARM64Eval(cc, flag) < 0 3485 // result: (MOVDconst [0]) 3486 for { 3487 cc := auxIntToOp(v.AuxInt) 3488 flag := v_1 3489 if !(ccARM64Eval(cc, flag) < 0) { 3490 break 3491 } 3492 v.reset(OpARM64MOVDconst) 3493 v.AuxInt = int64ToAuxInt(0) 3494 return true 3495 } 3496 // match: (CSEL0 [cc] x (CMPWconst [0] boolval)) 3497 // cond: cc == OpARM64NotEqual && flagArg(boolval) != nil 3498 // result: (CSEL0 [boolval.Op] x flagArg(boolval)) 3499 for { 3500 cc := auxIntToOp(v.AuxInt) 3501 x := v_0 3502 if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 { 3503 break 3504 } 3505 boolval := v_1.Args[0] 3506 if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) { 3507 break 3508 } 3509 v.reset(OpARM64CSEL0) 3510 v.AuxInt = opToAuxInt(boolval.Op) 3511 v.AddArg2(x, flagArg(boolval)) 3512 return true 3513 } 3514 // match: (CSEL0 [cc] x (CMPWconst [0] boolval)) 3515 // cond: cc == OpARM64Equal && flagArg(boolval) != nil 3516 // result: (CSEL0 [arm64Negate(boolval.Op)] x flagArg(boolval)) 3517 for { 3518 cc := auxIntToOp(v.AuxInt) 3519 x := v_0 3520 if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 { 3521 break 3522 } 3523 boolval := v_1.Args[0] 3524 if !(cc == OpARM64Equal && flagArg(boolval) != nil) { 3525 break 3526 } 3527 v.reset(OpARM64CSEL0) 3528 v.AuxInt = opToAuxInt(arm64Negate(boolval.Op)) 3529 v.AddArg2(x, flagArg(boolval)) 3530 return true 3531 } 3532 return false 3533 } 3534 func rewriteValueARM64_OpARM64CSETM(v *Value) bool { 3535 v_0 := v.Args[0] 3536 // match: (CSETM [cc] (InvertFlags cmp)) 3537 // result: (CSETM [arm64Invert(cc)] cmp) 3538 for { 3539 cc := auxIntToOp(v.AuxInt) 3540 if v_0.Op != OpARM64InvertFlags { 3541 break 3542 } 3543 cmp := v_0.Args[0] 3544 v.reset(OpARM64CSETM) 3545 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3546 v.AddArg(cmp) 3547 return true 3548 } 3549 // match: (CSETM [cc] flag) 3550 // cond: ccARM64Eval(cc, flag) > 0 3551 // result: (MOVDconst [-1]) 3552 for { 
3553 cc := auxIntToOp(v.AuxInt) 3554 flag := v_0 3555 if !(ccARM64Eval(cc, flag) > 0) { 3556 break 3557 } 3558 v.reset(OpARM64MOVDconst) 3559 v.AuxInt = int64ToAuxInt(-1) 3560 return true 3561 } 3562 // match: (CSETM [cc] flag) 3563 // cond: ccARM64Eval(cc, flag) < 0 3564 // result: (MOVDconst [0]) 3565 for { 3566 cc := auxIntToOp(v.AuxInt) 3567 flag := v_0 3568 if !(ccARM64Eval(cc, flag) < 0) { 3569 break 3570 } 3571 v.reset(OpARM64MOVDconst) 3572 v.AuxInt = int64ToAuxInt(0) 3573 return true 3574 } 3575 return false 3576 } 3577 func rewriteValueARM64_OpARM64CSINC(v *Value) bool { 3578 v_2 := v.Args[2] 3579 v_1 := v.Args[1] 3580 v_0 := v.Args[0] 3581 // match: (CSINC [cc] x y (InvertFlags cmp)) 3582 // result: (CSINC [arm64Invert(cc)] x y cmp) 3583 for { 3584 cc := auxIntToOp(v.AuxInt) 3585 x := v_0 3586 y := v_1 3587 if v_2.Op != OpARM64InvertFlags { 3588 break 3589 } 3590 cmp := v_2.Args[0] 3591 v.reset(OpARM64CSINC) 3592 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3593 v.AddArg3(x, y, cmp) 3594 return true 3595 } 3596 // match: (CSINC [cc] x _ flag) 3597 // cond: ccARM64Eval(cc, flag) > 0 3598 // result: x 3599 for { 3600 cc := auxIntToOp(v.AuxInt) 3601 x := v_0 3602 flag := v_2 3603 if !(ccARM64Eval(cc, flag) > 0) { 3604 break 3605 } 3606 v.copyOf(x) 3607 return true 3608 } 3609 // match: (CSINC [cc] _ y flag) 3610 // cond: ccARM64Eval(cc, flag) < 0 3611 // result: (ADDconst [1] y) 3612 for { 3613 cc := auxIntToOp(v.AuxInt) 3614 y := v_1 3615 flag := v_2 3616 if !(ccARM64Eval(cc, flag) < 0) { 3617 break 3618 } 3619 v.reset(OpARM64ADDconst) 3620 v.AuxInt = int64ToAuxInt(1) 3621 v.AddArg(y) 3622 return true 3623 } 3624 return false 3625 } 3626 func rewriteValueARM64_OpARM64CSINV(v *Value) bool { 3627 v_2 := v.Args[2] 3628 v_1 := v.Args[1] 3629 v_0 := v.Args[0] 3630 // match: (CSINV [cc] x y (InvertFlags cmp)) 3631 // result: (CSINV [arm64Invert(cc)] x y cmp) 3632 for { 3633 cc := auxIntToOp(v.AuxInt) 3634 x := v_0 3635 y := v_1 3636 if v_2.Op != OpARM64InvertFlags { 
3637 break 3638 } 3639 cmp := v_2.Args[0] 3640 v.reset(OpARM64CSINV) 3641 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3642 v.AddArg3(x, y, cmp) 3643 return true 3644 } 3645 // match: (CSINV [cc] x _ flag) 3646 // cond: ccARM64Eval(cc, flag) > 0 3647 // result: x 3648 for { 3649 cc := auxIntToOp(v.AuxInt) 3650 x := v_0 3651 flag := v_2 3652 if !(ccARM64Eval(cc, flag) > 0) { 3653 break 3654 } 3655 v.copyOf(x) 3656 return true 3657 } 3658 // match: (CSINV [cc] _ y flag) 3659 // cond: ccARM64Eval(cc, flag) < 0 3660 // result: (Not y) 3661 for { 3662 cc := auxIntToOp(v.AuxInt) 3663 y := v_1 3664 flag := v_2 3665 if !(ccARM64Eval(cc, flag) < 0) { 3666 break 3667 } 3668 v.reset(OpNot) 3669 v.AddArg(y) 3670 return true 3671 } 3672 return false 3673 } 3674 func rewriteValueARM64_OpARM64CSNEG(v *Value) bool { 3675 v_2 := v.Args[2] 3676 v_1 := v.Args[1] 3677 v_0 := v.Args[0] 3678 // match: (CSNEG [cc] x y (InvertFlags cmp)) 3679 // result: (CSNEG [arm64Invert(cc)] x y cmp) 3680 for { 3681 cc := auxIntToOp(v.AuxInt) 3682 x := v_0 3683 y := v_1 3684 if v_2.Op != OpARM64InvertFlags { 3685 break 3686 } 3687 cmp := v_2.Args[0] 3688 v.reset(OpARM64CSNEG) 3689 v.AuxInt = opToAuxInt(arm64Invert(cc)) 3690 v.AddArg3(x, y, cmp) 3691 return true 3692 } 3693 // match: (CSNEG [cc] x _ flag) 3694 // cond: ccARM64Eval(cc, flag) > 0 3695 // result: x 3696 for { 3697 cc := auxIntToOp(v.AuxInt) 3698 x := v_0 3699 flag := v_2 3700 if !(ccARM64Eval(cc, flag) > 0) { 3701 break 3702 } 3703 v.copyOf(x) 3704 return true 3705 } 3706 // match: (CSNEG [cc] _ y flag) 3707 // cond: ccARM64Eval(cc, flag) < 0 3708 // result: (NEG y) 3709 for { 3710 cc := auxIntToOp(v.AuxInt) 3711 y := v_1 3712 flag := v_2 3713 if !(ccARM64Eval(cc, flag) < 0) { 3714 break 3715 } 3716 v.reset(OpARM64NEG) 3717 v.AddArg(y) 3718 return true 3719 } 3720 return false 3721 } 3722 func rewriteValueARM64_OpARM64DIV(v *Value) bool { 3723 v_1 := v.Args[1] 3724 v_0 := v.Args[0] 3725 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 3726 // 
cond: d != 0 3727 // result: (MOVDconst [c/d]) 3728 for { 3729 if v_0.Op != OpARM64MOVDconst { 3730 break 3731 } 3732 c := auxIntToInt64(v_0.AuxInt) 3733 if v_1.Op != OpARM64MOVDconst { 3734 break 3735 } 3736 d := auxIntToInt64(v_1.AuxInt) 3737 if !(d != 0) { 3738 break 3739 } 3740 v.reset(OpARM64MOVDconst) 3741 v.AuxInt = int64ToAuxInt(c / d) 3742 return true 3743 } 3744 return false 3745 } 3746 func rewriteValueARM64_OpARM64DIVW(v *Value) bool { 3747 v_1 := v.Args[1] 3748 v_0 := v.Args[0] 3749 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 3750 // cond: d != 0 3751 // result: (MOVDconst [int64(uint32(int32(c)/int32(d)))]) 3752 for { 3753 if v_0.Op != OpARM64MOVDconst { 3754 break 3755 } 3756 c := auxIntToInt64(v_0.AuxInt) 3757 if v_1.Op != OpARM64MOVDconst { 3758 break 3759 } 3760 d := auxIntToInt64(v_1.AuxInt) 3761 if !(d != 0) { 3762 break 3763 } 3764 v.reset(OpARM64MOVDconst) 3765 v.AuxInt = int64ToAuxInt(int64(uint32(int32(c) / int32(d)))) 3766 return true 3767 } 3768 return false 3769 } 3770 func rewriteValueARM64_OpARM64EON(v *Value) bool { 3771 v_1 := v.Args[1] 3772 v_0 := v.Args[0] 3773 // match: (EON x (MOVDconst [c])) 3774 // result: (XORconst [^c] x) 3775 for { 3776 x := v_0 3777 if v_1.Op != OpARM64MOVDconst { 3778 break 3779 } 3780 c := auxIntToInt64(v_1.AuxInt) 3781 v.reset(OpARM64XORconst) 3782 v.AuxInt = int64ToAuxInt(^c) 3783 v.AddArg(x) 3784 return true 3785 } 3786 // match: (EON x x) 3787 // result: (MOVDconst [-1]) 3788 for { 3789 x := v_0 3790 if x != v_1 { 3791 break 3792 } 3793 v.reset(OpARM64MOVDconst) 3794 v.AuxInt = int64ToAuxInt(-1) 3795 return true 3796 } 3797 // match: (EON x0 x1:(SLLconst [c] y)) 3798 // cond: clobberIfDead(x1) 3799 // result: (EONshiftLL x0 y [c]) 3800 for { 3801 x0 := v_0 3802 x1 := v_1 3803 if x1.Op != OpARM64SLLconst { 3804 break 3805 } 3806 c := auxIntToInt64(x1.AuxInt) 3807 y := x1.Args[0] 3808 if !(clobberIfDead(x1)) { 3809 break 3810 } 3811 v.reset(OpARM64EONshiftLL) 3812 v.AuxInt = int64ToAuxInt(c) 3813 
v.AddArg2(x0, y) 3814 return true 3815 } 3816 // match: (EON x0 x1:(SRLconst [c] y)) 3817 // cond: clobberIfDead(x1) 3818 // result: (EONshiftRL x0 y [c]) 3819 for { 3820 x0 := v_0 3821 x1 := v_1 3822 if x1.Op != OpARM64SRLconst { 3823 break 3824 } 3825 c := auxIntToInt64(x1.AuxInt) 3826 y := x1.Args[0] 3827 if !(clobberIfDead(x1)) { 3828 break 3829 } 3830 v.reset(OpARM64EONshiftRL) 3831 v.AuxInt = int64ToAuxInt(c) 3832 v.AddArg2(x0, y) 3833 return true 3834 } 3835 // match: (EON x0 x1:(SRAconst [c] y)) 3836 // cond: clobberIfDead(x1) 3837 // result: (EONshiftRA x0 y [c]) 3838 for { 3839 x0 := v_0 3840 x1 := v_1 3841 if x1.Op != OpARM64SRAconst { 3842 break 3843 } 3844 c := auxIntToInt64(x1.AuxInt) 3845 y := x1.Args[0] 3846 if !(clobberIfDead(x1)) { 3847 break 3848 } 3849 v.reset(OpARM64EONshiftRA) 3850 v.AuxInt = int64ToAuxInt(c) 3851 v.AddArg2(x0, y) 3852 return true 3853 } 3854 // match: (EON x0 x1:(RORconst [c] y)) 3855 // cond: clobberIfDead(x1) 3856 // result: (EONshiftRO x0 y [c]) 3857 for { 3858 x0 := v_0 3859 x1 := v_1 3860 if x1.Op != OpARM64RORconst { 3861 break 3862 } 3863 c := auxIntToInt64(x1.AuxInt) 3864 y := x1.Args[0] 3865 if !(clobberIfDead(x1)) { 3866 break 3867 } 3868 v.reset(OpARM64EONshiftRO) 3869 v.AuxInt = int64ToAuxInt(c) 3870 v.AddArg2(x0, y) 3871 return true 3872 } 3873 return false 3874 } 3875 func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool { 3876 v_1 := v.Args[1] 3877 v_0 := v.Args[0] 3878 // match: (EONshiftLL x (MOVDconst [c]) [d]) 3879 // result: (XORconst x [^int64(uint64(c)<<uint64(d))]) 3880 for { 3881 d := auxIntToInt64(v.AuxInt) 3882 x := v_0 3883 if v_1.Op != OpARM64MOVDconst { 3884 break 3885 } 3886 c := auxIntToInt64(v_1.AuxInt) 3887 v.reset(OpARM64XORconst) 3888 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d))) 3889 v.AddArg(x) 3890 return true 3891 } 3892 // match: (EONshiftLL (SLLconst x [c]) x [c]) 3893 // result: (MOVDconst [-1]) 3894 for { 3895 c := auxIntToInt64(v.AuxInt) 3896 if v_0.Op != 
OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c { 3897 break 3898 } 3899 x := v_0.Args[0] 3900 if x != v_1 { 3901 break 3902 } 3903 v.reset(OpARM64MOVDconst) 3904 v.AuxInt = int64ToAuxInt(-1) 3905 return true 3906 } 3907 return false 3908 } 3909 func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool { 3910 v_1 := v.Args[1] 3911 v_0 := v.Args[0] 3912 // match: (EONshiftRA x (MOVDconst [c]) [d]) 3913 // result: (XORconst x [^(c>>uint64(d))]) 3914 for { 3915 d := auxIntToInt64(v.AuxInt) 3916 x := v_0 3917 if v_1.Op != OpARM64MOVDconst { 3918 break 3919 } 3920 c := auxIntToInt64(v_1.AuxInt) 3921 v.reset(OpARM64XORconst) 3922 v.AuxInt = int64ToAuxInt(^(c >> uint64(d))) 3923 v.AddArg(x) 3924 return true 3925 } 3926 // match: (EONshiftRA (SRAconst x [c]) x [c]) 3927 // result: (MOVDconst [-1]) 3928 for { 3929 c := auxIntToInt64(v.AuxInt) 3930 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c { 3931 break 3932 } 3933 x := v_0.Args[0] 3934 if x != v_1 { 3935 break 3936 } 3937 v.reset(OpARM64MOVDconst) 3938 v.AuxInt = int64ToAuxInt(-1) 3939 return true 3940 } 3941 return false 3942 } 3943 func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool { 3944 v_1 := v.Args[1] 3945 v_0 := v.Args[0] 3946 // match: (EONshiftRL x (MOVDconst [c]) [d]) 3947 // result: (XORconst x [^int64(uint64(c)>>uint64(d))]) 3948 for { 3949 d := auxIntToInt64(v.AuxInt) 3950 x := v_0 3951 if v_1.Op != OpARM64MOVDconst { 3952 break 3953 } 3954 c := auxIntToInt64(v_1.AuxInt) 3955 v.reset(OpARM64XORconst) 3956 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d))) 3957 v.AddArg(x) 3958 return true 3959 } 3960 // match: (EONshiftRL (SRLconst x [c]) x [c]) 3961 // result: (MOVDconst [-1]) 3962 for { 3963 c := auxIntToInt64(v.AuxInt) 3964 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c { 3965 break 3966 } 3967 x := v_0.Args[0] 3968 if x != v_1 { 3969 break 3970 } 3971 v.reset(OpARM64MOVDconst) 3972 v.AuxInt = int64ToAuxInt(-1) 3973 return true 3974 } 3975 return false 3976 } 
// rewriteValueARM64_OpARM64EONshiftRO simplifies EONshiftRO: folds a constant
// second operand into XORconst, and recognizes the matched-rotate self case as
// the all-ones constant.
func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64Equal simplifies Equal's flag operand: comparisons
// of single-use AND/ADD/MADD/MSUB results against zero become TST/CMN/CMP
// forms, FlagConstant is evaluated statically, and InvertFlags is dropped
// (equality is unaffected by operand order).
func rewriteValueARM64_OpARM64Equal(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Equal (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (Equal (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (Equal (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMP x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN x y))
	for {
		if v_0.Op != OpARM64CMP {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPW x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPW {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.eq())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
		return true
	}
	// match: (Equal (InvertFlags x))
	// result: (Equal x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FADDD fuses a double add with a multiply into
// FMADDD/FMSUBD when the function's FMA policy allows it (commutative match).
func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDD a (FNMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}

// rewriteValueARM64_OpARM64FADDS fuses a single-precision add with a multiply
// into FMADDS/FMSUBS when the function's FMA policy allows it.
func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDS a (FNMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}

// rewriteValueARM64_OpARM64FCMPD rewrites a double compare against the
// constant 0.0 into the compare-with-zero form, inverting flags when the
// constant is the first operand.
func rewriteValueARM64_OpARM64FCMPD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPD x (FMOVDconst [0]))
	// result: (FCMPD0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPD0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPD (FMOVDconst [0]) x)
	// result: (InvertFlags (FCMPD0 x))
	for {
		if v_0.Op != OpARM64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FCMPS rewrites a single-precision compare against
// the constant 0.0 into the compare-with-zero form, inverting flags when the
// constant is the first operand.
func rewriteValueARM64_OpARM64FCMPS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPS x (FMOVSconst [0]))
	// result: (FCMPS0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVSconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPS0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPS (FMOVSconst [0]) x)
	// result: (InvertFlags (FCMPS0 x))
	for {
		if v_0.Op != OpARM64FMOVSconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDfpgp moves an FP->GP copy of an Arg into the
// entry block as a direct Arg of the destination type.
func rewriteValueARM64_OpARM64FMOVDfpgp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDgpfp moves a GP->FP copy of an Arg into the
// entry block as a direct Arg of the destination type.
func rewriteValueARM64_OpARM64FMOVDgpfp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDload simplifies FMOVDload: store-to-load
// forwarding via FMOVDgpfp, ADDconst/MOVDaddr offset folding, and conversion
// of ADD/ADDshiftLL addressing into the indexed load forms.
func rewriteValueARM64_OpARM64FMOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (FMOVDgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVDgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDloadidx simplifies indexed double loads:
// a constant index becomes a plain offset load, and a SLLconst-by-3 index (on
// either operand) becomes the scaled loadidx8 form.
func rewriteValueARM64_OpARM64FMOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx ptr (SLLconst [3] idx) mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDloadidx (SLLconst [3] idx) ptr mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDloadidx8 folds a constant scaled index into a
// plain offset load (offset = c<<3, when it fits in 32 bits).
func rewriteValueARM64_OpARM64FMOVDloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDload ptr [int32(c)<<3] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDstore simplifies FMOVDstore: a value that just
// crossed GP->FP is stored directly from the GP side, offsets are folded from
// ADDconst/MOVDaddr, and ADD/ADDshiftLL addressing becomes the indexed forms.
func rewriteValueARM64_OpARM64FMOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDstoreidx simplifies indexed double stores: a
// constant index becomes a plain offset store, and a SLLconst-by-3 index (on
// either address operand) becomes the scaled storeidx8 form.
func rewriteValueARM64_OpARM64FMOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDstoreidx8 folds a constant scaled index into a
// plain offset store (offset = c<<3, when it fits in 32 bits).
func rewriteValueARM64_OpARM64FMOVDstoreidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDstore [int32(c)<<3] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVSload simplifies FMOVSload: store-to-load
// forwarding via FMOVSgpfp, ADDconst/MOVDaddr offset folding, and conversion
// of ADD/ADDshiftLL addressing into the indexed load forms (4-byte scale).
func rewriteValueARM64_OpARM64FMOVSload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (FMOVSgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVSgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVSloadidx simplifies indexed single loads: a
// constant index becomes a plain offset load, and a SLLconst-by-2 index (on
// either operand) becomes the scaled loadidx4 form.
func rewriteValueARM64_OpARM64FMOVSloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx ptr (SLLconst [2] idx) mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSloadidx (SLLconst [2] idx) ptr mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVSloadidx4 folds a constant scaled index into a
// plain offset load (offset = c<<2, when it fits in 32 bits).
func rewriteValueARM64_OpARM64FMOVSloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (FMOVSload ptr [int32(c)<<2] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVSstore simplifies FMOVSstore: a value that just
// crossed GP->FP is stored directly from the GP side, offsets are folded from
// ADDconst/MOVDaddr, and ADD/ADDshiftLL addressing becomes the indexed forms
// (4-byte scale).
func rewriteValueARM64_OpARM64FMOVSstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
5269 func rewriteValueARM64_OpARM64FMOVSstoreidx(v *Value) bool { 5270 v_3 := v.Args[3] 5271 v_2 := v.Args[2] 5272 v_1 := v.Args[1] 5273 v_0 := v.Args[0] 5274 // match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem) 5275 // cond: is32Bit(c) 5276 // result: (FMOVSstore [int32(c)] ptr val mem) 5277 for { 5278 ptr := v_0 5279 if v_1.Op != OpARM64MOVDconst { 5280 break 5281 } 5282 c := auxIntToInt64(v_1.AuxInt) 5283 val := v_2 5284 mem := v_3 5285 if !(is32Bit(c)) { 5286 break 5287 } 5288 v.reset(OpARM64FMOVSstore) 5289 v.AuxInt = int32ToAuxInt(int32(c)) 5290 v.AddArg3(ptr, val, mem) 5291 return true 5292 } 5293 // match: (FMOVSstoreidx (MOVDconst [c]) idx val mem) 5294 // cond: is32Bit(c) 5295 // result: (FMOVSstore [int32(c)] idx val mem) 5296 for { 5297 if v_0.Op != OpARM64MOVDconst { 5298 break 5299 } 5300 c := auxIntToInt64(v_0.AuxInt) 5301 idx := v_1 5302 val := v_2 5303 mem := v_3 5304 if !(is32Bit(c)) { 5305 break 5306 } 5307 v.reset(OpARM64FMOVSstore) 5308 v.AuxInt = int32ToAuxInt(int32(c)) 5309 v.AddArg3(idx, val, mem) 5310 return true 5311 } 5312 // match: (FMOVSstoreidx ptr (SLLconst [2] idx) val mem) 5313 // result: (FMOVSstoreidx4 ptr idx val mem) 5314 for { 5315 ptr := v_0 5316 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 { 5317 break 5318 } 5319 idx := v_1.Args[0] 5320 val := v_2 5321 mem := v_3 5322 v.reset(OpARM64FMOVSstoreidx4) 5323 v.AddArg4(ptr, idx, val, mem) 5324 return true 5325 } 5326 // match: (FMOVSstoreidx (SLLconst [2] idx) ptr val mem) 5327 // result: (FMOVSstoreidx4 ptr idx val mem) 5328 for { 5329 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 { 5330 break 5331 } 5332 idx := v_0.Args[0] 5333 ptr := v_1 5334 val := v_2 5335 mem := v_3 5336 v.reset(OpARM64FMOVSstoreidx4) 5337 v.AddArg4(ptr, idx, val, mem) 5338 return true 5339 } 5340 return false 5341 } 5342 func rewriteValueARM64_OpARM64FMOVSstoreidx4(v *Value) bool { 5343 v_3 := v.Args[3] 5344 v_2 := v.Args[2] 5345 v_1 := v.Args[1] 5346 v_0 := v.Args[0] 
5347 // match: (FMOVSstoreidx4 ptr (MOVDconst [c]) val mem) 5348 // cond: is32Bit(c<<2) 5349 // result: (FMOVSstore [int32(c)<<2] ptr val mem) 5350 for { 5351 ptr := v_0 5352 if v_1.Op != OpARM64MOVDconst { 5353 break 5354 } 5355 c := auxIntToInt64(v_1.AuxInt) 5356 val := v_2 5357 mem := v_3 5358 if !(is32Bit(c << 2)) { 5359 break 5360 } 5361 v.reset(OpARM64FMOVSstore) 5362 v.AuxInt = int32ToAuxInt(int32(c) << 2) 5363 v.AddArg3(ptr, val, mem) 5364 return true 5365 } 5366 return false 5367 } 5368 func rewriteValueARM64_OpARM64FMULD(v *Value) bool { 5369 v_1 := v.Args[1] 5370 v_0 := v.Args[0] 5371 // match: (FMULD (FNEGD x) y) 5372 // result: (FNMULD x y) 5373 for { 5374 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 5375 if v_0.Op != OpARM64FNEGD { 5376 continue 5377 } 5378 x := v_0.Args[0] 5379 y := v_1 5380 v.reset(OpARM64FNMULD) 5381 v.AddArg2(x, y) 5382 return true 5383 } 5384 break 5385 } 5386 return false 5387 } 5388 func rewriteValueARM64_OpARM64FMULS(v *Value) bool { 5389 v_1 := v.Args[1] 5390 v_0 := v.Args[0] 5391 // match: (FMULS (FNEGS x) y) 5392 // result: (FNMULS x y) 5393 for { 5394 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 5395 if v_0.Op != OpARM64FNEGS { 5396 continue 5397 } 5398 x := v_0.Args[0] 5399 y := v_1 5400 v.reset(OpARM64FNMULS) 5401 v.AddArg2(x, y) 5402 return true 5403 } 5404 break 5405 } 5406 return false 5407 } 5408 func rewriteValueARM64_OpARM64FNEGD(v *Value) bool { 5409 v_0 := v.Args[0] 5410 // match: (FNEGD (FMULD x y)) 5411 // result: (FNMULD x y) 5412 for { 5413 if v_0.Op != OpARM64FMULD { 5414 break 5415 } 5416 y := v_0.Args[1] 5417 x := v_0.Args[0] 5418 v.reset(OpARM64FNMULD) 5419 v.AddArg2(x, y) 5420 return true 5421 } 5422 // match: (FNEGD (FNMULD x y)) 5423 // result: (FMULD x y) 5424 for { 5425 if v_0.Op != OpARM64FNMULD { 5426 break 5427 } 5428 y := v_0.Args[1] 5429 x := v_0.Args[0] 5430 v.reset(OpARM64FMULD) 5431 v.AddArg2(x, y) 5432 return true 5433 } 5434 return false 5435 } 5436 func 
rewriteValueARM64_OpARM64FNEGS(v *Value) bool { 5437 v_0 := v.Args[0] 5438 // match: (FNEGS (FMULS x y)) 5439 // result: (FNMULS x y) 5440 for { 5441 if v_0.Op != OpARM64FMULS { 5442 break 5443 } 5444 y := v_0.Args[1] 5445 x := v_0.Args[0] 5446 v.reset(OpARM64FNMULS) 5447 v.AddArg2(x, y) 5448 return true 5449 } 5450 // match: (FNEGS (FNMULS x y)) 5451 // result: (FMULS x y) 5452 for { 5453 if v_0.Op != OpARM64FNMULS { 5454 break 5455 } 5456 y := v_0.Args[1] 5457 x := v_0.Args[0] 5458 v.reset(OpARM64FMULS) 5459 v.AddArg2(x, y) 5460 return true 5461 } 5462 return false 5463 } 5464 func rewriteValueARM64_OpARM64FNMULD(v *Value) bool { 5465 v_1 := v.Args[1] 5466 v_0 := v.Args[0] 5467 // match: (FNMULD (FNEGD x) y) 5468 // result: (FMULD x y) 5469 for { 5470 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 5471 if v_0.Op != OpARM64FNEGD { 5472 continue 5473 } 5474 x := v_0.Args[0] 5475 y := v_1 5476 v.reset(OpARM64FMULD) 5477 v.AddArg2(x, y) 5478 return true 5479 } 5480 break 5481 } 5482 return false 5483 } 5484 func rewriteValueARM64_OpARM64FNMULS(v *Value) bool { 5485 v_1 := v.Args[1] 5486 v_0 := v.Args[0] 5487 // match: (FNMULS (FNEGS x) y) 5488 // result: (FMULS x y) 5489 for { 5490 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 5491 if v_0.Op != OpARM64FNEGS { 5492 continue 5493 } 5494 x := v_0.Args[0] 5495 y := v_1 5496 v.reset(OpARM64FMULS) 5497 v.AddArg2(x, y) 5498 return true 5499 } 5500 break 5501 } 5502 return false 5503 } 5504 func rewriteValueARM64_OpARM64FSUBD(v *Value) bool { 5505 v_1 := v.Args[1] 5506 v_0 := v.Args[0] 5507 // match: (FSUBD a (FMULD x y)) 5508 // cond: a.Block.Func.useFMA(v) 5509 // result: (FMSUBD a x y) 5510 for { 5511 a := v_0 5512 if v_1.Op != OpARM64FMULD { 5513 break 5514 } 5515 y := v_1.Args[1] 5516 x := v_1.Args[0] 5517 if !(a.Block.Func.useFMA(v)) { 5518 break 5519 } 5520 v.reset(OpARM64FMSUBD) 5521 v.AddArg3(a, x, y) 5522 return true 5523 } 5524 // match: (FSUBD (FMULD x y) a) 5525 // cond: 
a.Block.Func.useFMA(v) 5526 // result: (FNMSUBD a x y) 5527 for { 5528 if v_0.Op != OpARM64FMULD { 5529 break 5530 } 5531 y := v_0.Args[1] 5532 x := v_0.Args[0] 5533 a := v_1 5534 if !(a.Block.Func.useFMA(v)) { 5535 break 5536 } 5537 v.reset(OpARM64FNMSUBD) 5538 v.AddArg3(a, x, y) 5539 return true 5540 } 5541 // match: (FSUBD a (FNMULD x y)) 5542 // cond: a.Block.Func.useFMA(v) 5543 // result: (FMADDD a x y) 5544 for { 5545 a := v_0 5546 if v_1.Op != OpARM64FNMULD { 5547 break 5548 } 5549 y := v_1.Args[1] 5550 x := v_1.Args[0] 5551 if !(a.Block.Func.useFMA(v)) { 5552 break 5553 } 5554 v.reset(OpARM64FMADDD) 5555 v.AddArg3(a, x, y) 5556 return true 5557 } 5558 // match: (FSUBD (FNMULD x y) a) 5559 // cond: a.Block.Func.useFMA(v) 5560 // result: (FNMADDD a x y) 5561 for { 5562 if v_0.Op != OpARM64FNMULD { 5563 break 5564 } 5565 y := v_0.Args[1] 5566 x := v_0.Args[0] 5567 a := v_1 5568 if !(a.Block.Func.useFMA(v)) { 5569 break 5570 } 5571 v.reset(OpARM64FNMADDD) 5572 v.AddArg3(a, x, y) 5573 return true 5574 } 5575 return false 5576 } 5577 func rewriteValueARM64_OpARM64FSUBS(v *Value) bool { 5578 v_1 := v.Args[1] 5579 v_0 := v.Args[0] 5580 // match: (FSUBS a (FMULS x y)) 5581 // cond: a.Block.Func.useFMA(v) 5582 // result: (FMSUBS a x y) 5583 for { 5584 a := v_0 5585 if v_1.Op != OpARM64FMULS { 5586 break 5587 } 5588 y := v_1.Args[1] 5589 x := v_1.Args[0] 5590 if !(a.Block.Func.useFMA(v)) { 5591 break 5592 } 5593 v.reset(OpARM64FMSUBS) 5594 v.AddArg3(a, x, y) 5595 return true 5596 } 5597 // match: (FSUBS (FMULS x y) a) 5598 // cond: a.Block.Func.useFMA(v) 5599 // result: (FNMSUBS a x y) 5600 for { 5601 if v_0.Op != OpARM64FMULS { 5602 break 5603 } 5604 y := v_0.Args[1] 5605 x := v_0.Args[0] 5606 a := v_1 5607 if !(a.Block.Func.useFMA(v)) { 5608 break 5609 } 5610 v.reset(OpARM64FNMSUBS) 5611 v.AddArg3(a, x, y) 5612 return true 5613 } 5614 // match: (FSUBS a (FNMULS x y)) 5615 // cond: a.Block.Func.useFMA(v) 5616 // result: (FMADDS a x y) 5617 for { 5618 a := v_0 5619 if 
v_1.Op != OpARM64FNMULS { 5620 break 5621 } 5622 y := v_1.Args[1] 5623 x := v_1.Args[0] 5624 if !(a.Block.Func.useFMA(v)) { 5625 break 5626 } 5627 v.reset(OpARM64FMADDS) 5628 v.AddArg3(a, x, y) 5629 return true 5630 } 5631 // match: (FSUBS (FNMULS x y) a) 5632 // cond: a.Block.Func.useFMA(v) 5633 // result: (FNMADDS a x y) 5634 for { 5635 if v_0.Op != OpARM64FNMULS { 5636 break 5637 } 5638 y := v_0.Args[1] 5639 x := v_0.Args[0] 5640 a := v_1 5641 if !(a.Block.Func.useFMA(v)) { 5642 break 5643 } 5644 v.reset(OpARM64FNMADDS) 5645 v.AddArg3(a, x, y) 5646 return true 5647 } 5648 return false 5649 } 5650 func rewriteValueARM64_OpARM64GreaterEqual(v *Value) bool { 5651 v_0 := v.Args[0] 5652 b := v.Block 5653 // match: (GreaterEqual (CMPconst [0] z:(AND x y))) 5654 // cond: z.Uses == 1 5655 // result: (GreaterEqual (TST x y)) 5656 for { 5657 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5658 break 5659 } 5660 z := v_0.Args[0] 5661 if z.Op != OpARM64AND { 5662 break 5663 } 5664 y := z.Args[1] 5665 x := z.Args[0] 5666 if !(z.Uses == 1) { 5667 break 5668 } 5669 v.reset(OpARM64GreaterEqual) 5670 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 5671 v0.AddArg2(x, y) 5672 v.AddArg(v0) 5673 return true 5674 } 5675 // match: (GreaterEqual (CMPWconst [0] x:(ANDconst [c] y))) 5676 // cond: x.Uses == 1 5677 // result: (GreaterEqual (TSTWconst [int32(c)] y)) 5678 for { 5679 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5680 break 5681 } 5682 x := v_0.Args[0] 5683 if x.Op != OpARM64ANDconst { 5684 break 5685 } 5686 c := auxIntToInt64(x.AuxInt) 5687 y := x.Args[0] 5688 if !(x.Uses == 1) { 5689 break 5690 } 5691 v.reset(OpARM64GreaterEqual) 5692 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 5693 v0.AuxInt = int32ToAuxInt(int32(c)) 5694 v0.AddArg(y) 5695 v.AddArg(v0) 5696 return true 5697 } 5698 // match: (GreaterEqual (CMPWconst [0] z:(AND x y))) 5699 // cond: z.Uses == 1 5700 // result: (GreaterEqual (TSTW x y)) 5701 for { 5702 
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5703 break 5704 } 5705 z := v_0.Args[0] 5706 if z.Op != OpARM64AND { 5707 break 5708 } 5709 y := z.Args[1] 5710 x := z.Args[0] 5711 if !(z.Uses == 1) { 5712 break 5713 } 5714 v.reset(OpARM64GreaterEqual) 5715 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 5716 v0.AddArg2(x, y) 5717 v.AddArg(v0) 5718 return true 5719 } 5720 // match: (GreaterEqual (CMPconst [0] x:(ANDconst [c] y))) 5721 // cond: x.Uses == 1 5722 // result: (GreaterEqual (TSTconst [c] y)) 5723 for { 5724 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5725 break 5726 } 5727 x := v_0.Args[0] 5728 if x.Op != OpARM64ANDconst { 5729 break 5730 } 5731 c := auxIntToInt64(x.AuxInt) 5732 y := x.Args[0] 5733 if !(x.Uses == 1) { 5734 break 5735 } 5736 v.reset(OpARM64GreaterEqual) 5737 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 5738 v0.AuxInt = int64ToAuxInt(c) 5739 v0.AddArg(y) 5740 v.AddArg(v0) 5741 return true 5742 } 5743 // match: (GreaterEqual (CMPconst [0] x:(ADDconst [c] y))) 5744 // cond: x.Uses == 1 5745 // result: (GreaterEqualNoov (CMNconst [c] y)) 5746 for { 5747 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5748 break 5749 } 5750 x := v_0.Args[0] 5751 if x.Op != OpARM64ADDconst { 5752 break 5753 } 5754 c := auxIntToInt64(x.AuxInt) 5755 y := x.Args[0] 5756 if !(x.Uses == 1) { 5757 break 5758 } 5759 v.reset(OpARM64GreaterEqualNoov) 5760 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 5761 v0.AuxInt = int64ToAuxInt(c) 5762 v0.AddArg(y) 5763 v.AddArg(v0) 5764 return true 5765 } 5766 // match: (GreaterEqual (CMPWconst [0] x:(ADDconst [c] y))) 5767 // cond: x.Uses == 1 5768 // result: (GreaterEqualNoov (CMNWconst [int32(c)] y)) 5769 for { 5770 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5771 break 5772 } 5773 x := v_0.Args[0] 5774 if x.Op != OpARM64ADDconst { 5775 break 5776 } 5777 c := auxIntToInt64(x.AuxInt) 5778 y := x.Args[0] 5779 if !(x.Uses 
== 1) { 5780 break 5781 } 5782 v.reset(OpARM64GreaterEqualNoov) 5783 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 5784 v0.AuxInt = int32ToAuxInt(int32(c)) 5785 v0.AddArg(y) 5786 v.AddArg(v0) 5787 return true 5788 } 5789 // match: (GreaterEqual (CMPconst [0] z:(ADD x y))) 5790 // cond: z.Uses == 1 5791 // result: (GreaterEqualNoov (CMN x y)) 5792 for { 5793 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5794 break 5795 } 5796 z := v_0.Args[0] 5797 if z.Op != OpARM64ADD { 5798 break 5799 } 5800 y := z.Args[1] 5801 x := z.Args[0] 5802 if !(z.Uses == 1) { 5803 break 5804 } 5805 v.reset(OpARM64GreaterEqualNoov) 5806 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 5807 v0.AddArg2(x, y) 5808 v.AddArg(v0) 5809 return true 5810 } 5811 // match: (GreaterEqual (CMPWconst [0] z:(ADD x y))) 5812 // cond: z.Uses == 1 5813 // result: (GreaterEqualNoov (CMNW x y)) 5814 for { 5815 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5816 break 5817 } 5818 z := v_0.Args[0] 5819 if z.Op != OpARM64ADD { 5820 break 5821 } 5822 y := z.Args[1] 5823 x := z.Args[0] 5824 if !(z.Uses == 1) { 5825 break 5826 } 5827 v.reset(OpARM64GreaterEqualNoov) 5828 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 5829 v0.AddArg2(x, y) 5830 v.AddArg(v0) 5831 return true 5832 } 5833 // match: (GreaterEqual (CMPconst [0] z:(MADD a x y))) 5834 // cond: z.Uses == 1 5835 // result: (GreaterEqualNoov (CMN a (MUL <x.Type> x y))) 5836 for { 5837 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5838 break 5839 } 5840 z := v_0.Args[0] 5841 if z.Op != OpARM64MADD { 5842 break 5843 } 5844 y := z.Args[2] 5845 a := z.Args[0] 5846 x := z.Args[1] 5847 if !(z.Uses == 1) { 5848 break 5849 } 5850 v.reset(OpARM64GreaterEqualNoov) 5851 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 5852 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 5853 v1.AddArg2(x, y) 5854 v0.AddArg2(a, v1) 5855 v.AddArg(v0) 5856 return true 5857 } 5858 // match: (GreaterEqual 
(CMPconst [0] z:(MSUB a x y))) 5859 // cond: z.Uses == 1 5860 // result: (GreaterEqualNoov (CMP a (MUL <x.Type> x y))) 5861 for { 5862 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 5863 break 5864 } 5865 z := v_0.Args[0] 5866 if z.Op != OpARM64MSUB { 5867 break 5868 } 5869 y := z.Args[2] 5870 a := z.Args[0] 5871 x := z.Args[1] 5872 if !(z.Uses == 1) { 5873 break 5874 } 5875 v.reset(OpARM64GreaterEqualNoov) 5876 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 5877 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 5878 v1.AddArg2(x, y) 5879 v0.AddArg2(a, v1) 5880 v.AddArg(v0) 5881 return true 5882 } 5883 // match: (GreaterEqual (CMPWconst [0] z:(MADDW a x y))) 5884 // cond: z.Uses == 1 5885 // result: (GreaterEqualNoov (CMNW a (MULW <x.Type> x y))) 5886 for { 5887 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5888 break 5889 } 5890 z := v_0.Args[0] 5891 if z.Op != OpARM64MADDW { 5892 break 5893 } 5894 y := z.Args[2] 5895 a := z.Args[0] 5896 x := z.Args[1] 5897 if !(z.Uses == 1) { 5898 break 5899 } 5900 v.reset(OpARM64GreaterEqualNoov) 5901 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 5902 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 5903 v1.AddArg2(x, y) 5904 v0.AddArg2(a, v1) 5905 v.AddArg(v0) 5906 return true 5907 } 5908 // match: (GreaterEqual (CMPWconst [0] z:(MSUBW a x y))) 5909 // cond: z.Uses == 1 5910 // result: (GreaterEqualNoov (CMPW a (MULW <x.Type> x y))) 5911 for { 5912 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 5913 break 5914 } 5915 z := v_0.Args[0] 5916 if z.Op != OpARM64MSUBW { 5917 break 5918 } 5919 y := z.Args[2] 5920 a := z.Args[0] 5921 x := z.Args[1] 5922 if !(z.Uses == 1) { 5923 break 5924 } 5925 v.reset(OpARM64GreaterEqualNoov) 5926 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 5927 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 5928 v1.AddArg2(x, y) 5929 v0.AddArg2(a, v1) 5930 v.AddArg(v0) 5931 return true 5932 } 5933 // match: (GreaterEqual (FlagConstant [fc])) 
5934 // result: (MOVDconst [b2i(fc.ge())]) 5935 for { 5936 if v_0.Op != OpARM64FlagConstant { 5937 break 5938 } 5939 fc := auxIntToFlagConstant(v_0.AuxInt) 5940 v.reset(OpARM64MOVDconst) 5941 v.AuxInt = int64ToAuxInt(b2i(fc.ge())) 5942 return true 5943 } 5944 // match: (GreaterEqual (InvertFlags x)) 5945 // result: (LessEqual x) 5946 for { 5947 if v_0.Op != OpARM64InvertFlags { 5948 break 5949 } 5950 x := v_0.Args[0] 5951 v.reset(OpARM64LessEqual) 5952 v.AddArg(x) 5953 return true 5954 } 5955 return false 5956 } 5957 func rewriteValueARM64_OpARM64GreaterEqualF(v *Value) bool { 5958 v_0 := v.Args[0] 5959 // match: (GreaterEqualF (InvertFlags x)) 5960 // result: (LessEqualF x) 5961 for { 5962 if v_0.Op != OpARM64InvertFlags { 5963 break 5964 } 5965 x := v_0.Args[0] 5966 v.reset(OpARM64LessEqualF) 5967 v.AddArg(x) 5968 return true 5969 } 5970 return false 5971 } 5972 func rewriteValueARM64_OpARM64GreaterEqualNoov(v *Value) bool { 5973 v_0 := v.Args[0] 5974 b := v.Block 5975 typ := &b.Func.Config.Types 5976 // match: (GreaterEqualNoov (InvertFlags x)) 5977 // result: (CSINC [OpARM64NotEqual] (LessThanNoov <typ.Bool> x) (MOVDconst [0]) x) 5978 for { 5979 if v_0.Op != OpARM64InvertFlags { 5980 break 5981 } 5982 x := v_0.Args[0] 5983 v.reset(OpARM64CSINC) 5984 v.AuxInt = opToAuxInt(OpARM64NotEqual) 5985 v0 := b.NewValue0(v.Pos, OpARM64LessThanNoov, typ.Bool) 5986 v0.AddArg(x) 5987 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 5988 v1.AuxInt = int64ToAuxInt(0) 5989 v.AddArg3(v0, v1, x) 5990 return true 5991 } 5992 return false 5993 } 5994 func rewriteValueARM64_OpARM64GreaterEqualU(v *Value) bool { 5995 v_0 := v.Args[0] 5996 // match: (GreaterEqualU (FlagConstant [fc])) 5997 // result: (MOVDconst [b2i(fc.uge())]) 5998 for { 5999 if v_0.Op != OpARM64FlagConstant { 6000 break 6001 } 6002 fc := auxIntToFlagConstant(v_0.AuxInt) 6003 v.reset(OpARM64MOVDconst) 6004 v.AuxInt = int64ToAuxInt(b2i(fc.uge())) 6005 return true 6006 } 6007 // match: (GreaterEqualU (InvertFlags 
x)) 6008 // result: (LessEqualU x) 6009 for { 6010 if v_0.Op != OpARM64InvertFlags { 6011 break 6012 } 6013 x := v_0.Args[0] 6014 v.reset(OpARM64LessEqualU) 6015 v.AddArg(x) 6016 return true 6017 } 6018 return false 6019 } 6020 func rewriteValueARM64_OpARM64GreaterThan(v *Value) bool { 6021 v_0 := v.Args[0] 6022 b := v.Block 6023 // match: (GreaterThan (CMPconst [0] z:(AND x y))) 6024 // cond: z.Uses == 1 6025 // result: (GreaterThan (TST x y)) 6026 for { 6027 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 6028 break 6029 } 6030 z := v_0.Args[0] 6031 if z.Op != OpARM64AND { 6032 break 6033 } 6034 y := z.Args[1] 6035 x := z.Args[0] 6036 if !(z.Uses == 1) { 6037 break 6038 } 6039 v.reset(OpARM64GreaterThan) 6040 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 6041 v0.AddArg2(x, y) 6042 v.AddArg(v0) 6043 return true 6044 } 6045 // match: (GreaterThan (CMPWconst [0] x:(ANDconst [c] y))) 6046 // cond: x.Uses == 1 6047 // result: (GreaterThan (TSTWconst [int32(c)] y)) 6048 for { 6049 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 6050 break 6051 } 6052 x := v_0.Args[0] 6053 if x.Op != OpARM64ANDconst { 6054 break 6055 } 6056 c := auxIntToInt64(x.AuxInt) 6057 y := x.Args[0] 6058 if !(x.Uses == 1) { 6059 break 6060 } 6061 v.reset(OpARM64GreaterThan) 6062 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 6063 v0.AuxInt = int32ToAuxInt(int32(c)) 6064 v0.AddArg(y) 6065 v.AddArg(v0) 6066 return true 6067 } 6068 // match: (GreaterThan (CMPWconst [0] z:(AND x y))) 6069 // cond: z.Uses == 1 6070 // result: (GreaterThan (TSTW x y)) 6071 for { 6072 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 6073 break 6074 } 6075 z := v_0.Args[0] 6076 if z.Op != OpARM64AND { 6077 break 6078 } 6079 y := z.Args[1] 6080 x := z.Args[0] 6081 if !(z.Uses == 1) { 6082 break 6083 } 6084 v.reset(OpARM64GreaterThan) 6085 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 6086 v0.AddArg2(x, y) 6087 v.AddArg(v0) 6088 return true 
6089 } 6090 // match: (GreaterThan (CMPconst [0] x:(ANDconst [c] y))) 6091 // cond: x.Uses == 1 6092 // result: (GreaterThan (TSTconst [c] y)) 6093 for { 6094 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 6095 break 6096 } 6097 x := v_0.Args[0] 6098 if x.Op != OpARM64ANDconst { 6099 break 6100 } 6101 c := auxIntToInt64(x.AuxInt) 6102 y := x.Args[0] 6103 if !(x.Uses == 1) { 6104 break 6105 } 6106 v.reset(OpARM64GreaterThan) 6107 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 6108 v0.AuxInt = int64ToAuxInt(c) 6109 v0.AddArg(y) 6110 v.AddArg(v0) 6111 return true 6112 } 6113 // match: (GreaterThan (FlagConstant [fc])) 6114 // result: (MOVDconst [b2i(fc.gt())]) 6115 for { 6116 if v_0.Op != OpARM64FlagConstant { 6117 break 6118 } 6119 fc := auxIntToFlagConstant(v_0.AuxInt) 6120 v.reset(OpARM64MOVDconst) 6121 v.AuxInt = int64ToAuxInt(b2i(fc.gt())) 6122 return true 6123 } 6124 // match: (GreaterThan (InvertFlags x)) 6125 // result: (LessThan x) 6126 for { 6127 if v_0.Op != OpARM64InvertFlags { 6128 break 6129 } 6130 x := v_0.Args[0] 6131 v.reset(OpARM64LessThan) 6132 v.AddArg(x) 6133 return true 6134 } 6135 return false 6136 } 6137 func rewriteValueARM64_OpARM64GreaterThanF(v *Value) bool { 6138 v_0 := v.Args[0] 6139 // match: (GreaterThanF (InvertFlags x)) 6140 // result: (LessThanF x) 6141 for { 6142 if v_0.Op != OpARM64InvertFlags { 6143 break 6144 } 6145 x := v_0.Args[0] 6146 v.reset(OpARM64LessThanF) 6147 v.AddArg(x) 6148 return true 6149 } 6150 return false 6151 } 6152 func rewriteValueARM64_OpARM64GreaterThanU(v *Value) bool { 6153 v_0 := v.Args[0] 6154 // match: (GreaterThanU (FlagConstant [fc])) 6155 // result: (MOVDconst [b2i(fc.ugt())]) 6156 for { 6157 if v_0.Op != OpARM64FlagConstant { 6158 break 6159 } 6160 fc := auxIntToFlagConstant(v_0.AuxInt) 6161 v.reset(OpARM64MOVDconst) 6162 v.AuxInt = int64ToAuxInt(b2i(fc.ugt())) 6163 return true 6164 } 6165 // match: (GreaterThanU (InvertFlags x)) 6166 // result: (LessThanU x) 6167 for { 
6168 if v_0.Op != OpARM64InvertFlags { 6169 break 6170 } 6171 x := v_0.Args[0] 6172 v.reset(OpARM64LessThanU) 6173 v.AddArg(x) 6174 return true 6175 } 6176 return false 6177 } 6178 func rewriteValueARM64_OpARM64LDP(v *Value) bool { 6179 v_1 := v.Args[1] 6180 v_0 := v.Args[0] 6181 b := v.Block 6182 config := b.Func.Config 6183 // match: (LDP [off1] {sym} (ADDconst [off2] ptr) mem) 6184 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 6185 // result: (LDP [off1+int32(off2)] {sym} ptr mem) 6186 for { 6187 off1 := auxIntToInt32(v.AuxInt) 6188 sym := auxToSym(v.Aux) 6189 if v_0.Op != OpARM64ADDconst { 6190 break 6191 } 6192 off2 := auxIntToInt64(v_0.AuxInt) 6193 ptr := v_0.Args[0] 6194 mem := v_1 6195 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 6196 break 6197 } 6198 v.reset(OpARM64LDP) 6199 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 6200 v.Aux = symToAux(sym) 6201 v.AddArg2(ptr, mem) 6202 return true 6203 } 6204 // match: (LDP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6205 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 6206 // result: (LDP [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6207 for { 6208 off1 := auxIntToInt32(v.AuxInt) 6209 sym1 := auxToSym(v.Aux) 6210 if v_0.Op != OpARM64MOVDaddr { 6211 break 6212 } 6213 off2 := auxIntToInt32(v_0.AuxInt) 6214 sym2 := auxToSym(v_0.Aux) 6215 ptr := v_0.Args[0] 6216 mem := v_1 6217 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 6218 break 6219 } 6220 v.reset(OpARM64LDP) 6221 v.AuxInt = int32ToAuxInt(off1 + off2) 6222 v.Aux = symToAux(mergeSym(sym1, sym2)) 6223 v.AddArg2(ptr, mem) 6224 return true 6225 } 6226 return false 6227 } 6228 func rewriteValueARM64_OpARM64LessEqual(v *Value) bool { 6229 v_0 := v.Args[0] 6230 b := v.Block 6231 // match: (LessEqual (CMPconst [0] z:(AND x y))) 6232 // cond: z.Uses == 1 
6233 // result: (LessEqual (TST x y)) 6234 for { 6235 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 6236 break 6237 } 6238 z := v_0.Args[0] 6239 if z.Op != OpARM64AND { 6240 break 6241 } 6242 y := z.Args[1] 6243 x := z.Args[0] 6244 if !(z.Uses == 1) { 6245 break 6246 } 6247 v.reset(OpARM64LessEqual) 6248 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 6249 v0.AddArg2(x, y) 6250 v.AddArg(v0) 6251 return true 6252 } 6253 // match: (LessEqual (CMPWconst [0] x:(ANDconst [c] y))) 6254 // cond: x.Uses == 1 6255 // result: (LessEqual (TSTWconst [int32(c)] y)) 6256 for { 6257 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 6258 break 6259 } 6260 x := v_0.Args[0] 6261 if x.Op != OpARM64ANDconst { 6262 break 6263 } 6264 c := auxIntToInt64(x.AuxInt) 6265 y := x.Args[0] 6266 if !(x.Uses == 1) { 6267 break 6268 } 6269 v.reset(OpARM64LessEqual) 6270 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 6271 v0.AuxInt = int32ToAuxInt(int32(c)) 6272 v0.AddArg(y) 6273 v.AddArg(v0) 6274 return true 6275 } 6276 // match: (LessEqual (CMPWconst [0] z:(AND x y))) 6277 // cond: z.Uses == 1 6278 // result: (LessEqual (TSTW x y)) 6279 for { 6280 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 6281 break 6282 } 6283 z := v_0.Args[0] 6284 if z.Op != OpARM64AND { 6285 break 6286 } 6287 y := z.Args[1] 6288 x := z.Args[0] 6289 if !(z.Uses == 1) { 6290 break 6291 } 6292 v.reset(OpARM64LessEqual) 6293 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 6294 v0.AddArg2(x, y) 6295 v.AddArg(v0) 6296 return true 6297 } 6298 // match: (LessEqual (CMPconst [0] x:(ANDconst [c] y))) 6299 // cond: x.Uses == 1 6300 // result: (LessEqual (TSTconst [c] y)) 6301 for { 6302 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 6303 break 6304 } 6305 x := v_0.Args[0] 6306 if x.Op != OpARM64ANDconst { 6307 break 6308 } 6309 c := auxIntToInt64(x.AuxInt) 6310 y := x.Args[0] 6311 if !(x.Uses == 1) { 6312 break 6313 } 6314 
v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessEqual (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.le())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.le()))
		return true
	}
	// match: (LessEqual (InvertFlags x))
	// result: (GreaterEqual x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessEqualF applies the _gen/ARM64.rules rewrites
// for OpARM64LessEqualF and reports whether v was rewritten.
// Generated code: edit the rules file, not this function.
func rewriteValueARM64_OpARM64LessEqualF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualF (InvertFlags x))
	// result: (GreaterEqualF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualF)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessEqualU applies the _gen/ARM64.rules rewrites
// for OpARM64LessEqualU (unsigned <=) and reports whether v was rewritten.
func rewriteValueARM64_OpARM64LessEqualU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ule())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ule()))
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// result: (GreaterEqualU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessThan applies the _gen/ARM64.rules rewrites for
// OpARM64LessThan and reports whether v was rewritten. Rules are tried in
// order; each `for { ... }` block is one rule, and the first match wins.
func rewriteValueARM64_OpARM64LessThan(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (LessThan (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessThan (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThan (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessThan (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThan (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThanNoov (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThanNoov (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.lt())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.lt()))
		return true
	}
	// match: (LessThan (InvertFlags x))
	// result: (GreaterThan x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessThanF applies the _gen/ARM64.rules rewrites
// for OpARM64LessThanF and reports whether v was rewritten.
func rewriteValueARM64_OpARM64LessThanF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessThanF (InvertFlags x))
	// result: (GreaterThanF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanF)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessThanNoov applies the _gen/ARM64.rules rewrites
// for OpARM64LessThanNoov and reports whether v was rewritten.
func rewriteValueARM64_OpARM64LessThanNoov(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ :=
&b.Func.Config.Types
	// match: (LessThanNoov (InvertFlags x))
	// result: (CSEL0 [OpARM64NotEqual] (GreaterEqualNoov <typ.Bool> x) x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64GreaterEqualNoov, typ.Bool)
		v0.AddArg(x)
		v.AddArg2(v0, x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessThanU applies the _gen/ARM64.rules rewrites
// for OpARM64LessThanU (unsigned <) and reports whether v was rewritten.
func rewriteValueARM64_OpARM64LessThanU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessThanU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ult())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ult()))
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// result: (GreaterThanU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADD applies the _gen/ARM64.rules rewrites for
// OpARM64MADD (a + x*y) and reports whether v was rewritten. Rules are tried
// in order; constant multipliers are strength-reduced to shifts/adds/subs.
// Generated code: edit the rules file, not this function.
func rewriteValueARM64_OpARM64MADD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MADD a x (MOVDconst [-1]))
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a _ (MOVDconst [0]))
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a x (MOVDconst [1]))
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [-1]) x)
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		x := v_2
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		x := v_2
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// result: (ADDconst [c*d] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c * d)
		v.AddArg(a)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADDW is the 32-bit counterpart of MADD; results
// are wrapped in MOVWUreg per the rules below. Reports whether v was rewritten.
// Generated code: edit the rules file, not this function.
func rewriteValueARM64_OpARM64MADDW(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if
v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (ADD <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (SUB <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (ADD <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (SUB <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW (MOVDconst [c]) x y)
	// result: (MOVWUreg (ADDconst <x.Type> [c] (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVWUreg (ADDconst <a.Type> [c*d] a))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, a.Type)
		v0.AuxInt = int64ToAuxInt(c * d)
		v0.AddArg(a)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEG applies the _gen/ARM64.rules rewrites for
// OpARM64MNEG (-(x*y)) and reports whether v was rewritten.
// Generated code: edit the rules file, not this function.
func rewriteValueARM64_OpARM64MNEG(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MNEG x (MOVDconst [-1]))
	// result: x
	for {
		// MNEG is commutative: the inner loop swaps v_0/v_1 to try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MNEG _ (MOVDconst [0]))
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [1]))
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (NEG (SLLconst <x.Type> [log64(c)] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		for _i0 :=
0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && c >= 3) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c - 1))
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && c >= 7) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c + 1))
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (NEG (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 5))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(2)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (NEG (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 9))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [-c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(-c * d)
			return true
		}
		break
	}
	return false
}

// rewriteValueARM64_OpARM64MNEGW applies the _gen/ARM64.rules rewrites for
// OpARM64MNEGW (32-bit -(x*y)) and reports whether v was rewritten.
func rewriteValueARM64_OpARM64MNEGW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == -1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 0) {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg (NEG <x.Type> x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (NEG (SLLconst
<x.Type> [log64(c)] x)) 7905 for { 7906 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 7907 x := v_0 7908 if v_1.Op != OpARM64MOVDconst { 7909 continue 7910 } 7911 c := auxIntToInt64(v_1.AuxInt) 7912 if !(isPowerOfTwo64(c)) { 7913 continue 7914 } 7915 v.reset(OpARM64NEG) 7916 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7917 v0.AuxInt = int64ToAuxInt(log64(c)) 7918 v0.AddArg(x) 7919 v.AddArg(v0) 7920 return true 7921 } 7922 break 7923 } 7924 // match: (MNEGW x (MOVDconst [c])) 7925 // cond: isPowerOfTwo64(c-1) && int32(c) >= 3 7926 // result: (MOVWUreg (NEG <x.Type> (ADDshiftLL <x.Type> x x [log64(c-1)]))) 7927 for { 7928 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 7929 x := v_0 7930 if v_1.Op != OpARM64MOVDconst { 7931 continue 7932 } 7933 c := auxIntToInt64(v_1.AuxInt) 7934 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) { 7935 continue 7936 } 7937 v.reset(OpARM64MOVWUreg) 7938 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 7939 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7940 v1.AuxInt = int64ToAuxInt(log64(c - 1)) 7941 v1.AddArg2(x, x) 7942 v0.AddArg(v1) 7943 v.AddArg(v0) 7944 return true 7945 } 7946 break 7947 } 7948 // match: (MNEGW x (MOVDconst [c])) 7949 // cond: isPowerOfTwo64(c+1) && int32(c) >= 7 7950 // result: (MOVWUreg (NEG <x.Type> (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))) 7951 for { 7952 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 7953 x := v_0 7954 if v_1.Op != OpARM64MOVDconst { 7955 continue 7956 } 7957 c := auxIntToInt64(v_1.AuxInt) 7958 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) { 7959 continue 7960 } 7961 v.reset(OpARM64MOVWUreg) 7962 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 7963 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7964 v1.AuxInt = int64ToAuxInt(log64(c + 1)) 7965 v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 7966 v2.AddArg(x) 7967 v1.AddArg2(v2, x) 7968 v0.AddArg(v1) 7969 v.AddArg(v0) 7970 return true 7971 } 7972 break 7973 } 7974 // match: (MNEGW x (MOVDconst [c])) 
7975 // cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c) 7976 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2]))) 7977 for { 7978 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 7979 x := v_0 7980 if v_1.Op != OpARM64MOVDconst { 7981 continue 7982 } 7983 c := auxIntToInt64(v_1.AuxInt) 7984 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) { 7985 continue 7986 } 7987 v.reset(OpARM64MOVWUreg) 7988 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7989 v0.AuxInt = int64ToAuxInt(log64(c / 3)) 7990 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7991 v1.AuxInt = int64ToAuxInt(2) 7992 v1.AddArg2(x, x) 7993 v0.AddArg(v1) 7994 v.AddArg(v0) 7995 return true 7996 } 7997 break 7998 } 7999 // match: (MNEGW x (MOVDconst [c])) 8000 // cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c) 8001 // result: (MOVWUreg (NEG <x.Type> (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))) 8002 for { 8003 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 8004 x := v_0 8005 if v_1.Op != OpARM64MOVDconst { 8006 continue 8007 } 8008 c := auxIntToInt64(v_1.AuxInt) 8009 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) { 8010 continue 8011 } 8012 v.reset(OpARM64MOVWUreg) 8013 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8014 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8015 v1.AuxInt = int64ToAuxInt(log64(c / 5)) 8016 v2 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8017 v2.AuxInt = int64ToAuxInt(2) 8018 v2.AddArg2(x, x) 8019 v1.AddArg(v2) 8020 v0.AddArg(v1) 8021 v.AddArg(v0) 8022 return true 8023 } 8024 break 8025 } 8026 // match: (MNEGW x (MOVDconst [c])) 8027 // cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c) 8028 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3]))) 8029 for { 8030 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 8031 x := v_0 8032 if v_1.Op != OpARM64MOVDconst { 8033 continue 8034 } 8035 c := auxIntToInt64(v_1.AuxInt) 8036 if !(c%7 == 0 && 
isPowerOfTwo64(c/7) && is32Bit(c)) { 8037 continue 8038 } 8039 v.reset(OpARM64MOVWUreg) 8040 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8041 v0.AuxInt = int64ToAuxInt(log64(c / 7)) 8042 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8043 v1.AuxInt = int64ToAuxInt(3) 8044 v1.AddArg2(x, x) 8045 v0.AddArg(v1) 8046 v.AddArg(v0) 8047 return true 8048 } 8049 break 8050 } 8051 // match: (MNEGW x (MOVDconst [c])) 8052 // cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c) 8053 // result: (MOVWUreg (NEG <x.Type> (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))) 8054 for { 8055 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 8056 x := v_0 8057 if v_1.Op != OpARM64MOVDconst { 8058 continue 8059 } 8060 c := auxIntToInt64(v_1.AuxInt) 8061 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) { 8062 continue 8063 } 8064 v.reset(OpARM64MOVWUreg) 8065 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8066 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8067 v1.AuxInt = int64ToAuxInt(log64(c / 9)) 8068 v2 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8069 v2.AuxInt = int64ToAuxInt(3) 8070 v2.AddArg2(x, x) 8071 v1.AddArg(v2) 8072 v0.AddArg(v1) 8073 v.AddArg(v0) 8074 return true 8075 } 8076 break 8077 } 8078 // match: (MNEGW (MOVDconst [c]) (MOVDconst [d])) 8079 // result: (MOVDconst [int64(uint32(-c*d))]) 8080 for { 8081 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 8082 if v_0.Op != OpARM64MOVDconst { 8083 continue 8084 } 8085 c := auxIntToInt64(v_0.AuxInt) 8086 if v_1.Op != OpARM64MOVDconst { 8087 continue 8088 } 8089 d := auxIntToInt64(v_1.AuxInt) 8090 v.reset(OpARM64MOVDconst) 8091 v.AuxInt = int64ToAuxInt(int64(uint32(-c * d))) 8092 return true 8093 } 8094 break 8095 } 8096 return false 8097 } 8098 func rewriteValueARM64_OpARM64MOD(v *Value) bool { 8099 v_1 := v.Args[1] 8100 v_0 := v.Args[0] 8101 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 8102 // cond: d != 0 8103 // result: (MOVDconst [c%d]) 8104 for { 8105 if v_0.Op != 
OpARM64MOVDconst { 8106 break 8107 } 8108 c := auxIntToInt64(v_0.AuxInt) 8109 if v_1.Op != OpARM64MOVDconst { 8110 break 8111 } 8112 d := auxIntToInt64(v_1.AuxInt) 8113 if !(d != 0) { 8114 break 8115 } 8116 v.reset(OpARM64MOVDconst) 8117 v.AuxInt = int64ToAuxInt(c % d) 8118 return true 8119 } 8120 return false 8121 } 8122 func rewriteValueARM64_OpARM64MODW(v *Value) bool { 8123 v_1 := v.Args[1] 8124 v_0 := v.Args[0] 8125 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 8126 // cond: d != 0 8127 // result: (MOVDconst [int64(uint32(int32(c)%int32(d)))]) 8128 for { 8129 if v_0.Op != OpARM64MOVDconst { 8130 break 8131 } 8132 c := auxIntToInt64(v_0.AuxInt) 8133 if v_1.Op != OpARM64MOVDconst { 8134 break 8135 } 8136 d := auxIntToInt64(v_1.AuxInt) 8137 if !(d != 0) { 8138 break 8139 } 8140 v.reset(OpARM64MOVDconst) 8141 v.AuxInt = int64ToAuxInt(int64(uint32(int32(c) % int32(d)))) 8142 return true 8143 } 8144 return false 8145 } 8146 func rewriteValueARM64_OpARM64MOVBUload(v *Value) bool { 8147 v_1 := v.Args[1] 8148 v_0 := v.Args[0] 8149 b := v.Block 8150 config := b.Func.Config 8151 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 8152 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8153 // result: (MOVBUload [off1+int32(off2)] {sym} ptr mem) 8154 for { 8155 off1 := auxIntToInt32(v.AuxInt) 8156 sym := auxToSym(v.Aux) 8157 if v_0.Op != OpARM64ADDconst { 8158 break 8159 } 8160 off2 := auxIntToInt64(v_0.AuxInt) 8161 ptr := v_0.Args[0] 8162 mem := v_1 8163 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8164 break 8165 } 8166 v.reset(OpARM64MOVBUload) 8167 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 8168 v.Aux = symToAux(sym) 8169 v.AddArg2(ptr, mem) 8170 return true 8171 } 8172 // match: (MOVBUload [off] {sym} (ADD ptr idx) mem) 8173 // cond: off == 0 && sym == nil 8174 // result: (MOVBUloadidx ptr idx mem) 8175 for { 8176 off := auxIntToInt32(v.AuxInt) 8177 sym := auxToSym(v.Aux) 8178 if 
v_0.Op != OpARM64ADD { 8179 break 8180 } 8181 idx := v_0.Args[1] 8182 ptr := v_0.Args[0] 8183 mem := v_1 8184 if !(off == 0 && sym == nil) { 8185 break 8186 } 8187 v.reset(OpARM64MOVBUloadidx) 8188 v.AddArg3(ptr, idx, mem) 8189 return true 8190 } 8191 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 8192 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8193 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 8194 for { 8195 off1 := auxIntToInt32(v.AuxInt) 8196 sym1 := auxToSym(v.Aux) 8197 if v_0.Op != OpARM64MOVDaddr { 8198 break 8199 } 8200 off2 := auxIntToInt32(v_0.AuxInt) 8201 sym2 := auxToSym(v_0.Aux) 8202 ptr := v_0.Args[0] 8203 mem := v_1 8204 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8205 break 8206 } 8207 v.reset(OpARM64MOVBUload) 8208 v.AuxInt = int32ToAuxInt(off1 + off2) 8209 v.Aux = symToAux(mergeSym(sym1, sym2)) 8210 v.AddArg2(ptr, mem) 8211 return true 8212 } 8213 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 8214 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 8215 // result: (MOVDconst [0]) 8216 for { 8217 off := auxIntToInt32(v.AuxInt) 8218 sym := auxToSym(v.Aux) 8219 ptr := v_0 8220 if v_1.Op != OpARM64MOVBstorezero { 8221 break 8222 } 8223 off2 := auxIntToInt32(v_1.AuxInt) 8224 sym2 := auxToSym(v_1.Aux) 8225 ptr2 := v_1.Args[0] 8226 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 8227 break 8228 } 8229 v.reset(OpARM64MOVDconst) 8230 v.AuxInt = int64ToAuxInt(0) 8231 return true 8232 } 8233 // match: (MOVBUload [off] {sym} (SB) _) 8234 // cond: symIsRO(sym) 8235 // result: (MOVDconst [int64(read8(sym, int64(off)))]) 8236 for { 8237 off := auxIntToInt32(v.AuxInt) 8238 sym := auxToSym(v.Aux) 8239 if v_0.Op != OpSB || !(symIsRO(sym)) { 8240 break 8241 } 8242 v.reset(OpARM64MOVDconst) 8243 v.AuxInt = 
int64ToAuxInt(int64(read8(sym, int64(off)))) 8244 return true 8245 } 8246 return false 8247 } 8248 func rewriteValueARM64_OpARM64MOVBUloadidx(v *Value) bool { 8249 v_2 := v.Args[2] 8250 v_1 := v.Args[1] 8251 v_0 := v.Args[0] 8252 // match: (MOVBUloadidx ptr (MOVDconst [c]) mem) 8253 // cond: is32Bit(c) 8254 // result: (MOVBUload [int32(c)] ptr mem) 8255 for { 8256 ptr := v_0 8257 if v_1.Op != OpARM64MOVDconst { 8258 break 8259 } 8260 c := auxIntToInt64(v_1.AuxInt) 8261 mem := v_2 8262 if !(is32Bit(c)) { 8263 break 8264 } 8265 v.reset(OpARM64MOVBUload) 8266 v.AuxInt = int32ToAuxInt(int32(c)) 8267 v.AddArg2(ptr, mem) 8268 return true 8269 } 8270 // match: (MOVBUloadidx (MOVDconst [c]) ptr mem) 8271 // cond: is32Bit(c) 8272 // result: (MOVBUload [int32(c)] ptr mem) 8273 for { 8274 if v_0.Op != OpARM64MOVDconst { 8275 break 8276 } 8277 c := auxIntToInt64(v_0.AuxInt) 8278 ptr := v_1 8279 mem := v_2 8280 if !(is32Bit(c)) { 8281 break 8282 } 8283 v.reset(OpARM64MOVBUload) 8284 v.AuxInt = int32ToAuxInt(int32(c)) 8285 v.AddArg2(ptr, mem) 8286 return true 8287 } 8288 // match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 8289 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 8290 // result: (MOVDconst [0]) 8291 for { 8292 ptr := v_0 8293 idx := v_1 8294 if v_2.Op != OpARM64MOVBstorezeroidx { 8295 break 8296 } 8297 idx2 := v_2.Args[1] 8298 ptr2 := v_2.Args[0] 8299 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 8300 break 8301 } 8302 v.reset(OpARM64MOVDconst) 8303 v.AuxInt = int64ToAuxInt(0) 8304 return true 8305 } 8306 return false 8307 } 8308 func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool { 8309 v_0 := v.Args[0] 8310 // match: (MOVBUreg x:(MOVBUload _ _)) 8311 // result: (MOVDreg x) 8312 for { 8313 x := v_0 8314 if x.Op != OpARM64MOVBUload { 8315 break 8316 } 8317 v.reset(OpARM64MOVDreg) 8318 v.AddArg(x) 8319 return true 8320 } 8321 // match: (MOVBUreg 
x:(MOVBUloadidx _ _ _)) 8322 // result: (MOVDreg x) 8323 for { 8324 x := v_0 8325 if x.Op != OpARM64MOVBUloadidx { 8326 break 8327 } 8328 v.reset(OpARM64MOVDreg) 8329 v.AddArg(x) 8330 return true 8331 } 8332 // match: (MOVBUreg x:(MOVBUreg _)) 8333 // result: (MOVDreg x) 8334 for { 8335 x := v_0 8336 if x.Op != OpARM64MOVBUreg { 8337 break 8338 } 8339 v.reset(OpARM64MOVDreg) 8340 v.AddArg(x) 8341 return true 8342 } 8343 // match: (MOVBUreg (ANDconst [c] x)) 8344 // result: (ANDconst [c&(1<<8-1)] x) 8345 for { 8346 if v_0.Op != OpARM64ANDconst { 8347 break 8348 } 8349 c := auxIntToInt64(v_0.AuxInt) 8350 x := v_0.Args[0] 8351 v.reset(OpARM64ANDconst) 8352 v.AuxInt = int64ToAuxInt(c & (1<<8 - 1)) 8353 v.AddArg(x) 8354 return true 8355 } 8356 // match: (MOVBUreg (MOVDconst [c])) 8357 // result: (MOVDconst [int64(uint8(c))]) 8358 for { 8359 if v_0.Op != OpARM64MOVDconst { 8360 break 8361 } 8362 c := auxIntToInt64(v_0.AuxInt) 8363 v.reset(OpARM64MOVDconst) 8364 v.AuxInt = int64ToAuxInt(int64(uint8(c))) 8365 return true 8366 } 8367 // match: (MOVBUreg x:(Equal _)) 8368 // result: (MOVDreg x) 8369 for { 8370 x := v_0 8371 if x.Op != OpARM64Equal { 8372 break 8373 } 8374 v.reset(OpARM64MOVDreg) 8375 v.AddArg(x) 8376 return true 8377 } 8378 // match: (MOVBUreg x:(NotEqual _)) 8379 // result: (MOVDreg x) 8380 for { 8381 x := v_0 8382 if x.Op != OpARM64NotEqual { 8383 break 8384 } 8385 v.reset(OpARM64MOVDreg) 8386 v.AddArg(x) 8387 return true 8388 } 8389 // match: (MOVBUreg x:(LessThan _)) 8390 // result: (MOVDreg x) 8391 for { 8392 x := v_0 8393 if x.Op != OpARM64LessThan { 8394 break 8395 } 8396 v.reset(OpARM64MOVDreg) 8397 v.AddArg(x) 8398 return true 8399 } 8400 // match: (MOVBUreg x:(LessThanU _)) 8401 // result: (MOVDreg x) 8402 for { 8403 x := v_0 8404 if x.Op != OpARM64LessThanU { 8405 break 8406 } 8407 v.reset(OpARM64MOVDreg) 8408 v.AddArg(x) 8409 return true 8410 } 8411 // match: (MOVBUreg x:(LessThanF _)) 8412 // result: (MOVDreg x) 8413 for { 8414 x := v_0 8415 if 
x.Op != OpARM64LessThanF { 8416 break 8417 } 8418 v.reset(OpARM64MOVDreg) 8419 v.AddArg(x) 8420 return true 8421 } 8422 // match: (MOVBUreg x:(LessEqual _)) 8423 // result: (MOVDreg x) 8424 for { 8425 x := v_0 8426 if x.Op != OpARM64LessEqual { 8427 break 8428 } 8429 v.reset(OpARM64MOVDreg) 8430 v.AddArg(x) 8431 return true 8432 } 8433 // match: (MOVBUreg x:(LessEqualU _)) 8434 // result: (MOVDreg x) 8435 for { 8436 x := v_0 8437 if x.Op != OpARM64LessEqualU { 8438 break 8439 } 8440 v.reset(OpARM64MOVDreg) 8441 v.AddArg(x) 8442 return true 8443 } 8444 // match: (MOVBUreg x:(LessEqualF _)) 8445 // result: (MOVDreg x) 8446 for { 8447 x := v_0 8448 if x.Op != OpARM64LessEqualF { 8449 break 8450 } 8451 v.reset(OpARM64MOVDreg) 8452 v.AddArg(x) 8453 return true 8454 } 8455 // match: (MOVBUreg x:(GreaterThan _)) 8456 // result: (MOVDreg x) 8457 for { 8458 x := v_0 8459 if x.Op != OpARM64GreaterThan { 8460 break 8461 } 8462 v.reset(OpARM64MOVDreg) 8463 v.AddArg(x) 8464 return true 8465 } 8466 // match: (MOVBUreg x:(GreaterThanU _)) 8467 // result: (MOVDreg x) 8468 for { 8469 x := v_0 8470 if x.Op != OpARM64GreaterThanU { 8471 break 8472 } 8473 v.reset(OpARM64MOVDreg) 8474 v.AddArg(x) 8475 return true 8476 } 8477 // match: (MOVBUreg x:(GreaterThanF _)) 8478 // result: (MOVDreg x) 8479 for { 8480 x := v_0 8481 if x.Op != OpARM64GreaterThanF { 8482 break 8483 } 8484 v.reset(OpARM64MOVDreg) 8485 v.AddArg(x) 8486 return true 8487 } 8488 // match: (MOVBUreg x:(GreaterEqual _)) 8489 // result: (MOVDreg x) 8490 for { 8491 x := v_0 8492 if x.Op != OpARM64GreaterEqual { 8493 break 8494 } 8495 v.reset(OpARM64MOVDreg) 8496 v.AddArg(x) 8497 return true 8498 } 8499 // match: (MOVBUreg x:(GreaterEqualU _)) 8500 // result: (MOVDreg x) 8501 for { 8502 x := v_0 8503 if x.Op != OpARM64GreaterEqualU { 8504 break 8505 } 8506 v.reset(OpARM64MOVDreg) 8507 v.AddArg(x) 8508 return true 8509 } 8510 // match: (MOVBUreg x:(GreaterEqualF _)) 8511 // result: (MOVDreg x) 8512 for { 8513 x := v_0 8514 if 
x.Op != OpARM64GreaterEqualF { 8515 break 8516 } 8517 v.reset(OpARM64MOVDreg) 8518 v.AddArg(x) 8519 return true 8520 } 8521 // match: (MOVBUreg x) 8522 // cond: v.Type.Size() <= 1 8523 // result: x 8524 for { 8525 x := v_0 8526 if !(v.Type.Size() <= 1) { 8527 break 8528 } 8529 v.copyOf(x) 8530 return true 8531 } 8532 // match: (MOVBUreg (SLLconst [lc] x)) 8533 // cond: lc >= 8 8534 // result: (MOVDconst [0]) 8535 for { 8536 if v_0.Op != OpARM64SLLconst { 8537 break 8538 } 8539 lc := auxIntToInt64(v_0.AuxInt) 8540 if !(lc >= 8) { 8541 break 8542 } 8543 v.reset(OpARM64MOVDconst) 8544 v.AuxInt = int64ToAuxInt(0) 8545 return true 8546 } 8547 // match: (MOVBUreg (SLLconst [lc] x)) 8548 // cond: lc < 8 8549 // result: (UBFIZ [armBFAuxInt(lc, 8-lc)] x) 8550 for { 8551 if v_0.Op != OpARM64SLLconst { 8552 break 8553 } 8554 lc := auxIntToInt64(v_0.AuxInt) 8555 x := v_0.Args[0] 8556 if !(lc < 8) { 8557 break 8558 } 8559 v.reset(OpARM64UBFIZ) 8560 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc)) 8561 v.AddArg(x) 8562 return true 8563 } 8564 // match: (MOVBUreg (SRLconst [rc] x)) 8565 // cond: rc < 8 8566 // result: (UBFX [armBFAuxInt(rc, 8)] x) 8567 for { 8568 if v_0.Op != OpARM64SRLconst { 8569 break 8570 } 8571 rc := auxIntToInt64(v_0.AuxInt) 8572 x := v_0.Args[0] 8573 if !(rc < 8) { 8574 break 8575 } 8576 v.reset(OpARM64UBFX) 8577 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8)) 8578 v.AddArg(x) 8579 return true 8580 } 8581 // match: (MOVBUreg (UBFX [bfc] x)) 8582 // cond: bfc.getARM64BFwidth() <= 8 8583 // result: (UBFX [bfc] x) 8584 for { 8585 if v_0.Op != OpARM64UBFX { 8586 break 8587 } 8588 bfc := auxIntToArm64BitField(v_0.AuxInt) 8589 x := v_0.Args[0] 8590 if !(bfc.getARM64BFwidth() <= 8) { 8591 break 8592 } 8593 v.reset(OpARM64UBFX) 8594 v.AuxInt = arm64BitFieldToAuxInt(bfc) 8595 v.AddArg(x) 8596 return true 8597 } 8598 return false 8599 } 8600 func rewriteValueARM64_OpARM64MOVBload(v *Value) bool { 8601 v_1 := v.Args[1] 8602 v_0 := v.Args[0] 8603 b := 
v.Block 8604 config := b.Func.Config 8605 // match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem) 8606 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8607 // result: (MOVBload [off1+int32(off2)] {sym} ptr mem) 8608 for { 8609 off1 := auxIntToInt32(v.AuxInt) 8610 sym := auxToSym(v.Aux) 8611 if v_0.Op != OpARM64ADDconst { 8612 break 8613 } 8614 off2 := auxIntToInt64(v_0.AuxInt) 8615 ptr := v_0.Args[0] 8616 mem := v_1 8617 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8618 break 8619 } 8620 v.reset(OpARM64MOVBload) 8621 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 8622 v.Aux = symToAux(sym) 8623 v.AddArg2(ptr, mem) 8624 return true 8625 } 8626 // match: (MOVBload [off] {sym} (ADD ptr idx) mem) 8627 // cond: off == 0 && sym == nil 8628 // result: (MOVBloadidx ptr idx mem) 8629 for { 8630 off := auxIntToInt32(v.AuxInt) 8631 sym := auxToSym(v.Aux) 8632 if v_0.Op != OpARM64ADD { 8633 break 8634 } 8635 idx := v_0.Args[1] 8636 ptr := v_0.Args[0] 8637 mem := v_1 8638 if !(off == 0 && sym == nil) { 8639 break 8640 } 8641 v.reset(OpARM64MOVBloadidx) 8642 v.AddArg3(ptr, idx, mem) 8643 return true 8644 } 8645 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 8646 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8647 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 8648 for { 8649 off1 := auxIntToInt32(v.AuxInt) 8650 sym1 := auxToSym(v.Aux) 8651 if v_0.Op != OpARM64MOVDaddr { 8652 break 8653 } 8654 off2 := auxIntToInt32(v_0.AuxInt) 8655 sym2 := auxToSym(v_0.Aux) 8656 ptr := v_0.Args[0] 8657 mem := v_1 8658 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8659 break 8660 } 8661 v.reset(OpARM64MOVBload) 8662 v.AuxInt = int32ToAuxInt(off1 + off2) 8663 v.Aux = symToAux(mergeSym(sym1, sym2)) 8664 v.AddArg2(ptr, mem) 8665 return true 8666 } 
8667 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 8668 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 8669 // result: (MOVDconst [0]) 8670 for { 8671 off := auxIntToInt32(v.AuxInt) 8672 sym := auxToSym(v.Aux) 8673 ptr := v_0 8674 if v_1.Op != OpARM64MOVBstorezero { 8675 break 8676 } 8677 off2 := auxIntToInt32(v_1.AuxInt) 8678 sym2 := auxToSym(v_1.Aux) 8679 ptr2 := v_1.Args[0] 8680 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 8681 break 8682 } 8683 v.reset(OpARM64MOVDconst) 8684 v.AuxInt = int64ToAuxInt(0) 8685 return true 8686 } 8687 return false 8688 } 8689 func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool { 8690 v_2 := v.Args[2] 8691 v_1 := v.Args[1] 8692 v_0 := v.Args[0] 8693 // match: (MOVBloadidx ptr (MOVDconst [c]) mem) 8694 // cond: is32Bit(c) 8695 // result: (MOVBload [int32(c)] ptr mem) 8696 for { 8697 ptr := v_0 8698 if v_1.Op != OpARM64MOVDconst { 8699 break 8700 } 8701 c := auxIntToInt64(v_1.AuxInt) 8702 mem := v_2 8703 if !(is32Bit(c)) { 8704 break 8705 } 8706 v.reset(OpARM64MOVBload) 8707 v.AuxInt = int32ToAuxInt(int32(c)) 8708 v.AddArg2(ptr, mem) 8709 return true 8710 } 8711 // match: (MOVBloadidx (MOVDconst [c]) ptr mem) 8712 // cond: is32Bit(c) 8713 // result: (MOVBload [int32(c)] ptr mem) 8714 for { 8715 if v_0.Op != OpARM64MOVDconst { 8716 break 8717 } 8718 c := auxIntToInt64(v_0.AuxInt) 8719 ptr := v_1 8720 mem := v_2 8721 if !(is32Bit(c)) { 8722 break 8723 } 8724 v.reset(OpARM64MOVBload) 8725 v.AuxInt = int32ToAuxInt(int32(c)) 8726 v.AddArg2(ptr, mem) 8727 return true 8728 } 8729 // match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 8730 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 8731 // result: (MOVDconst [0]) 8732 for { 8733 ptr := v_0 8734 idx := v_1 8735 if v_2.Op != OpARM64MOVBstorezeroidx { 8736 break 8737 } 8738 idx2 := v_2.Args[1] 8739 ptr2 := v_2.Args[0] 8740 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, 
idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 8741 break 8742 } 8743 v.reset(OpARM64MOVDconst) 8744 v.AuxInt = int64ToAuxInt(0) 8745 return true 8746 } 8747 return false 8748 } 8749 func rewriteValueARM64_OpARM64MOVBreg(v *Value) bool { 8750 v_0 := v.Args[0] 8751 // match: (MOVBreg x:(MOVBload _ _)) 8752 // result: (MOVDreg x) 8753 for { 8754 x := v_0 8755 if x.Op != OpARM64MOVBload { 8756 break 8757 } 8758 v.reset(OpARM64MOVDreg) 8759 v.AddArg(x) 8760 return true 8761 } 8762 // match: (MOVBreg x:(MOVBloadidx _ _ _)) 8763 // result: (MOVDreg x) 8764 for { 8765 x := v_0 8766 if x.Op != OpARM64MOVBloadidx { 8767 break 8768 } 8769 v.reset(OpARM64MOVDreg) 8770 v.AddArg(x) 8771 return true 8772 } 8773 // match: (MOVBreg x:(MOVBreg _)) 8774 // result: (MOVDreg x) 8775 for { 8776 x := v_0 8777 if x.Op != OpARM64MOVBreg { 8778 break 8779 } 8780 v.reset(OpARM64MOVDreg) 8781 v.AddArg(x) 8782 return true 8783 } 8784 // match: (MOVBreg (MOVDconst [c])) 8785 // result: (MOVDconst [int64(int8(c))]) 8786 for { 8787 if v_0.Op != OpARM64MOVDconst { 8788 break 8789 } 8790 c := auxIntToInt64(v_0.AuxInt) 8791 v.reset(OpARM64MOVDconst) 8792 v.AuxInt = int64ToAuxInt(int64(int8(c))) 8793 return true 8794 } 8795 // match: (MOVBreg x) 8796 // cond: v.Type.Size() <= 1 8797 // result: x 8798 for { 8799 x := v_0 8800 if !(v.Type.Size() <= 1) { 8801 break 8802 } 8803 v.copyOf(x) 8804 return true 8805 } 8806 // match: (MOVBreg <t> (ANDconst x [c])) 8807 // cond: uint64(c) & uint64(0xffffffffffffff80) == 0 8808 // result: (ANDconst <t> x [c]) 8809 for { 8810 t := v.Type 8811 if v_0.Op != OpARM64ANDconst { 8812 break 8813 } 8814 c := auxIntToInt64(v_0.AuxInt) 8815 x := v_0.Args[0] 8816 if !(uint64(c)&uint64(0xffffffffffffff80) == 0) { 8817 break 8818 } 8819 v.reset(OpARM64ANDconst) 8820 v.Type = t 8821 v.AuxInt = int64ToAuxInt(c) 8822 v.AddArg(x) 8823 return true 8824 } 8825 // match: (MOVBreg (SLLconst [lc] x)) 8826 // cond: lc < 8 8827 // result: (SBFIZ [armBFAuxInt(lc, 8-lc)] x) 
8828 for { 8829 if v_0.Op != OpARM64SLLconst { 8830 break 8831 } 8832 lc := auxIntToInt64(v_0.AuxInt) 8833 x := v_0.Args[0] 8834 if !(lc < 8) { 8835 break 8836 } 8837 v.reset(OpARM64SBFIZ) 8838 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc)) 8839 v.AddArg(x) 8840 return true 8841 } 8842 // match: (MOVBreg (SBFX [bfc] x)) 8843 // cond: bfc.getARM64BFwidth() <= 8 8844 // result: (SBFX [bfc] x) 8845 for { 8846 if v_0.Op != OpARM64SBFX { 8847 break 8848 } 8849 bfc := auxIntToArm64BitField(v_0.AuxInt) 8850 x := v_0.Args[0] 8851 if !(bfc.getARM64BFwidth() <= 8) { 8852 break 8853 } 8854 v.reset(OpARM64SBFX) 8855 v.AuxInt = arm64BitFieldToAuxInt(bfc) 8856 v.AddArg(x) 8857 return true 8858 } 8859 return false 8860 } 8861 func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool { 8862 v_2 := v.Args[2] 8863 v_1 := v.Args[1] 8864 v_0 := v.Args[0] 8865 b := v.Block 8866 config := b.Func.Config 8867 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 8868 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8869 // result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem) 8870 for { 8871 off1 := auxIntToInt32(v.AuxInt) 8872 sym := auxToSym(v.Aux) 8873 if v_0.Op != OpARM64ADDconst { 8874 break 8875 } 8876 off2 := auxIntToInt64(v_0.AuxInt) 8877 ptr := v_0.Args[0] 8878 val := v_1 8879 mem := v_2 8880 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8881 break 8882 } 8883 v.reset(OpARM64MOVBstore) 8884 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 8885 v.Aux = symToAux(sym) 8886 v.AddArg3(ptr, val, mem) 8887 return true 8888 } 8889 // match: (MOVBstore [off] {sym} (ADD ptr idx) val mem) 8890 // cond: off == 0 && sym == nil 8891 // result: (MOVBstoreidx ptr idx val mem) 8892 for { 8893 off := auxIntToInt32(v.AuxInt) 8894 sym := auxToSym(v.Aux) 8895 if v_0.Op != OpARM64ADD { 8896 break 8897 } 8898 idx := v_0.Args[1] 8899 ptr := v_0.Args[0] 8900 val := v_1 8901 mem := v_2 8902 if !(off == 0 && sym == nil) 
{ 8903 break 8904 } 8905 v.reset(OpARM64MOVBstoreidx) 8906 v.AddArg4(ptr, idx, val, mem) 8907 return true 8908 } 8909 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 8910 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 8911 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 8912 for { 8913 off1 := auxIntToInt32(v.AuxInt) 8914 sym1 := auxToSym(v.Aux) 8915 if v_0.Op != OpARM64MOVDaddr { 8916 break 8917 } 8918 off2 := auxIntToInt32(v_0.AuxInt) 8919 sym2 := auxToSym(v_0.Aux) 8920 ptr := v_0.Args[0] 8921 val := v_1 8922 mem := v_2 8923 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 8924 break 8925 } 8926 v.reset(OpARM64MOVBstore) 8927 v.AuxInt = int32ToAuxInt(off1 + off2) 8928 v.Aux = symToAux(mergeSym(sym1, sym2)) 8929 v.AddArg3(ptr, val, mem) 8930 return true 8931 } 8932 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 8933 // result: (MOVBstorezero [off] {sym} ptr mem) 8934 for { 8935 off := auxIntToInt32(v.AuxInt) 8936 sym := auxToSym(v.Aux) 8937 ptr := v_0 8938 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 8939 break 8940 } 8941 mem := v_2 8942 v.reset(OpARM64MOVBstorezero) 8943 v.AuxInt = int32ToAuxInt(off) 8944 v.Aux = symToAux(sym) 8945 v.AddArg2(ptr, mem) 8946 return true 8947 } 8948 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 8949 // result: (MOVBstore [off] {sym} ptr x mem) 8950 for { 8951 off := auxIntToInt32(v.AuxInt) 8952 sym := auxToSym(v.Aux) 8953 ptr := v_0 8954 if v_1.Op != OpARM64MOVBreg { 8955 break 8956 } 8957 x := v_1.Args[0] 8958 mem := v_2 8959 v.reset(OpARM64MOVBstore) 8960 v.AuxInt = int32ToAuxInt(off) 8961 v.Aux = symToAux(sym) 8962 v.AddArg3(ptr, x, mem) 8963 return true 8964 } 8965 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 8966 // result: (MOVBstore [off] {sym} ptr x mem) 8967 for { 8968 off := 
auxIntToInt32(v.AuxInt) 8969 sym := auxToSym(v.Aux) 8970 ptr := v_0 8971 if v_1.Op != OpARM64MOVBUreg { 8972 break 8973 } 8974 x := v_1.Args[0] 8975 mem := v_2 8976 v.reset(OpARM64MOVBstore) 8977 v.AuxInt = int32ToAuxInt(off) 8978 v.Aux = symToAux(sym) 8979 v.AddArg3(ptr, x, mem) 8980 return true 8981 } 8982 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 8983 // result: (MOVBstore [off] {sym} ptr x mem) 8984 for { 8985 off := auxIntToInt32(v.AuxInt) 8986 sym := auxToSym(v.Aux) 8987 ptr := v_0 8988 if v_1.Op != OpARM64MOVHreg { 8989 break 8990 } 8991 x := v_1.Args[0] 8992 mem := v_2 8993 v.reset(OpARM64MOVBstore) 8994 v.AuxInt = int32ToAuxInt(off) 8995 v.Aux = symToAux(sym) 8996 v.AddArg3(ptr, x, mem) 8997 return true 8998 } 8999 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 9000 // result: (MOVBstore [off] {sym} ptr x mem) 9001 for { 9002 off := auxIntToInt32(v.AuxInt) 9003 sym := auxToSym(v.Aux) 9004 ptr := v_0 9005 if v_1.Op != OpARM64MOVHUreg { 9006 break 9007 } 9008 x := v_1.Args[0] 9009 mem := v_2 9010 v.reset(OpARM64MOVBstore) 9011 v.AuxInt = int32ToAuxInt(off) 9012 v.Aux = symToAux(sym) 9013 v.AddArg3(ptr, x, mem) 9014 return true 9015 } 9016 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 9017 // result: (MOVBstore [off] {sym} ptr x mem) 9018 for { 9019 off := auxIntToInt32(v.AuxInt) 9020 sym := auxToSym(v.Aux) 9021 ptr := v_0 9022 if v_1.Op != OpARM64MOVWreg { 9023 break 9024 } 9025 x := v_1.Args[0] 9026 mem := v_2 9027 v.reset(OpARM64MOVBstore) 9028 v.AuxInt = int32ToAuxInt(off) 9029 v.Aux = symToAux(sym) 9030 v.AddArg3(ptr, x, mem) 9031 return true 9032 } 9033 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 9034 // result: (MOVBstore [off] {sym} ptr x mem) 9035 for { 9036 off := auxIntToInt32(v.AuxInt) 9037 sym := auxToSym(v.Aux) 9038 ptr := v_0 9039 if v_1.Op != OpARM64MOVWUreg { 9040 break 9041 } 9042 x := v_1.Args[0] 9043 mem := v_2 9044 v.reset(OpARM64MOVBstore) 9045 v.AuxInt = int32ToAuxInt(off) 9046 v.Aux = 
symToAux(sym) 9047 v.AddArg3(ptr, x, mem) 9048 return true 9049 } 9050 return false 9051 } 9052 func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool { 9053 v_3 := v.Args[3] 9054 v_2 := v.Args[2] 9055 v_1 := v.Args[1] 9056 v_0 := v.Args[0] 9057 // match: (MOVBstoreidx ptr (MOVDconst [c]) val mem) 9058 // cond: is32Bit(c) 9059 // result: (MOVBstore [int32(c)] ptr val mem) 9060 for { 9061 ptr := v_0 9062 if v_1.Op != OpARM64MOVDconst { 9063 break 9064 } 9065 c := auxIntToInt64(v_1.AuxInt) 9066 val := v_2 9067 mem := v_3 9068 if !(is32Bit(c)) { 9069 break 9070 } 9071 v.reset(OpARM64MOVBstore) 9072 v.AuxInt = int32ToAuxInt(int32(c)) 9073 v.AddArg3(ptr, val, mem) 9074 return true 9075 } 9076 // match: (MOVBstoreidx (MOVDconst [c]) idx val mem) 9077 // cond: is32Bit(c) 9078 // result: (MOVBstore [int32(c)] idx val mem) 9079 for { 9080 if v_0.Op != OpARM64MOVDconst { 9081 break 9082 } 9083 c := auxIntToInt64(v_0.AuxInt) 9084 idx := v_1 9085 val := v_2 9086 mem := v_3 9087 if !(is32Bit(c)) { 9088 break 9089 } 9090 v.reset(OpARM64MOVBstore) 9091 v.AuxInt = int32ToAuxInt(int32(c)) 9092 v.AddArg3(idx, val, mem) 9093 return true 9094 } 9095 // match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem) 9096 // result: (MOVBstorezeroidx ptr idx mem) 9097 for { 9098 ptr := v_0 9099 idx := v_1 9100 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 9101 break 9102 } 9103 mem := v_3 9104 v.reset(OpARM64MOVBstorezeroidx) 9105 v.AddArg3(ptr, idx, mem) 9106 return true 9107 } 9108 // match: (MOVBstoreidx ptr idx (MOVBreg x) mem) 9109 // result: (MOVBstoreidx ptr idx x mem) 9110 for { 9111 ptr := v_0 9112 idx := v_1 9113 if v_2.Op != OpARM64MOVBreg { 9114 break 9115 } 9116 x := v_2.Args[0] 9117 mem := v_3 9118 v.reset(OpARM64MOVBstoreidx) 9119 v.AddArg4(ptr, idx, x, mem) 9120 return true 9121 } 9122 // match: (MOVBstoreidx ptr idx (MOVBUreg x) mem) 9123 // result: (MOVBstoreidx ptr idx x mem) 9124 for { 9125 ptr := v_0 9126 idx := v_1 9127 if v_2.Op != OpARM64MOVBUreg { 9128 
break 9129 } 9130 x := v_2.Args[0] 9131 mem := v_3 9132 v.reset(OpARM64MOVBstoreidx) 9133 v.AddArg4(ptr, idx, x, mem) 9134 return true 9135 } 9136 // match: (MOVBstoreidx ptr idx (MOVHreg x) mem) 9137 // result: (MOVBstoreidx ptr idx x mem) 9138 for { 9139 ptr := v_0 9140 idx := v_1 9141 if v_2.Op != OpARM64MOVHreg { 9142 break 9143 } 9144 x := v_2.Args[0] 9145 mem := v_3 9146 v.reset(OpARM64MOVBstoreidx) 9147 v.AddArg4(ptr, idx, x, mem) 9148 return true 9149 } 9150 // match: (MOVBstoreidx ptr idx (MOVHUreg x) mem) 9151 // result: (MOVBstoreidx ptr idx x mem) 9152 for { 9153 ptr := v_0 9154 idx := v_1 9155 if v_2.Op != OpARM64MOVHUreg { 9156 break 9157 } 9158 x := v_2.Args[0] 9159 mem := v_3 9160 v.reset(OpARM64MOVBstoreidx) 9161 v.AddArg4(ptr, idx, x, mem) 9162 return true 9163 } 9164 // match: (MOVBstoreidx ptr idx (MOVWreg x) mem) 9165 // result: (MOVBstoreidx ptr idx x mem) 9166 for { 9167 ptr := v_0 9168 idx := v_1 9169 if v_2.Op != OpARM64MOVWreg { 9170 break 9171 } 9172 x := v_2.Args[0] 9173 mem := v_3 9174 v.reset(OpARM64MOVBstoreidx) 9175 v.AddArg4(ptr, idx, x, mem) 9176 return true 9177 } 9178 // match: (MOVBstoreidx ptr idx (MOVWUreg x) mem) 9179 // result: (MOVBstoreidx ptr idx x mem) 9180 for { 9181 ptr := v_0 9182 idx := v_1 9183 if v_2.Op != OpARM64MOVWUreg { 9184 break 9185 } 9186 x := v_2.Args[0] 9187 mem := v_3 9188 v.reset(OpARM64MOVBstoreidx) 9189 v.AddArg4(ptr, idx, x, mem) 9190 return true 9191 } 9192 return false 9193 } 9194 func rewriteValueARM64_OpARM64MOVBstorezero(v *Value) bool { 9195 v_1 := v.Args[1] 9196 v_0 := v.Args[0] 9197 b := v.Block 9198 config := b.Func.Config 9199 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 9200 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9201 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem) 9202 for { 9203 off1 := auxIntToInt32(v.AuxInt) 9204 sym := auxToSym(v.Aux) 9205 if v_0.Op != OpARM64ADDconst { 9206 break 9207 } 9208 off2 := 
auxIntToInt64(v_0.AuxInt) 9209 ptr := v_0.Args[0] 9210 mem := v_1 9211 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9212 break 9213 } 9214 v.reset(OpARM64MOVBstorezero) 9215 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 9216 v.Aux = symToAux(sym) 9217 v.AddArg2(ptr, mem) 9218 return true 9219 } 9220 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 9221 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9222 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 9223 for { 9224 off1 := auxIntToInt32(v.AuxInt) 9225 sym1 := auxToSym(v.Aux) 9226 if v_0.Op != OpARM64MOVDaddr { 9227 break 9228 } 9229 off2 := auxIntToInt32(v_0.AuxInt) 9230 sym2 := auxToSym(v_0.Aux) 9231 ptr := v_0.Args[0] 9232 mem := v_1 9233 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9234 break 9235 } 9236 v.reset(OpARM64MOVBstorezero) 9237 v.AuxInt = int32ToAuxInt(off1 + off2) 9238 v.Aux = symToAux(mergeSym(sym1, sym2)) 9239 v.AddArg2(ptr, mem) 9240 return true 9241 } 9242 // match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem) 9243 // cond: off == 0 && sym == nil 9244 // result: (MOVBstorezeroidx ptr idx mem) 9245 for { 9246 off := auxIntToInt32(v.AuxInt) 9247 sym := auxToSym(v.Aux) 9248 if v_0.Op != OpARM64ADD { 9249 break 9250 } 9251 idx := v_0.Args[1] 9252 ptr := v_0.Args[0] 9253 mem := v_1 9254 if !(off == 0 && sym == nil) { 9255 break 9256 } 9257 v.reset(OpARM64MOVBstorezeroidx) 9258 v.AddArg3(ptr, idx, mem) 9259 return true 9260 } 9261 return false 9262 } 9263 func rewriteValueARM64_OpARM64MOVBstorezeroidx(v *Value) bool { 9264 v_2 := v.Args[2] 9265 v_1 := v.Args[1] 9266 v_0 := v.Args[0] 9267 // match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem) 9268 // cond: is32Bit(c) 9269 // result: (MOVBstorezero [int32(c)] ptr mem) 9270 for { 9271 ptr := v_0 9272 if v_1.Op != OpARM64MOVDconst { 
9273 break 9274 } 9275 c := auxIntToInt64(v_1.AuxInt) 9276 mem := v_2 9277 if !(is32Bit(c)) { 9278 break 9279 } 9280 v.reset(OpARM64MOVBstorezero) 9281 v.AuxInt = int32ToAuxInt(int32(c)) 9282 v.AddArg2(ptr, mem) 9283 return true 9284 } 9285 // match: (MOVBstorezeroidx (MOVDconst [c]) idx mem) 9286 // cond: is32Bit(c) 9287 // result: (MOVBstorezero [int32(c)] idx mem) 9288 for { 9289 if v_0.Op != OpARM64MOVDconst { 9290 break 9291 } 9292 c := auxIntToInt64(v_0.AuxInt) 9293 idx := v_1 9294 mem := v_2 9295 if !(is32Bit(c)) { 9296 break 9297 } 9298 v.reset(OpARM64MOVBstorezero) 9299 v.AuxInt = int32ToAuxInt(int32(c)) 9300 v.AddArg2(idx, mem) 9301 return true 9302 } 9303 return false 9304 } 9305 func rewriteValueARM64_OpARM64MOVDload(v *Value) bool { 9306 v_1 := v.Args[1] 9307 v_0 := v.Args[0] 9308 b := v.Block 9309 config := b.Func.Config 9310 // match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _)) 9311 // result: (FMOVDfpgp val) 9312 for { 9313 off := auxIntToInt32(v.AuxInt) 9314 sym := auxToSym(v.Aux) 9315 ptr := v_0 9316 if v_1.Op != OpARM64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym { 9317 break 9318 } 9319 val := v_1.Args[1] 9320 if ptr != v_1.Args[0] { 9321 break 9322 } 9323 v.reset(OpARM64FMOVDfpgp) 9324 v.AddArg(val) 9325 return true 9326 } 9327 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 9328 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9329 // result: (MOVDload [off1+int32(off2)] {sym} ptr mem) 9330 for { 9331 off1 := auxIntToInt32(v.AuxInt) 9332 sym := auxToSym(v.Aux) 9333 if v_0.Op != OpARM64ADDconst { 9334 break 9335 } 9336 off2 := auxIntToInt64(v_0.AuxInt) 9337 ptr := v_0.Args[0] 9338 mem := v_1 9339 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9340 break 9341 } 9342 v.reset(OpARM64MOVDload) 9343 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 9344 v.Aux = symToAux(sym) 9345 v.AddArg2(ptr, mem) 9346 return true 9347 } 
9348 // match: (MOVDload [off] {sym} (ADD ptr idx) mem) 9349 // cond: off == 0 && sym == nil 9350 // result: (MOVDloadidx ptr idx mem) 9351 for { 9352 off := auxIntToInt32(v.AuxInt) 9353 sym := auxToSym(v.Aux) 9354 if v_0.Op != OpARM64ADD { 9355 break 9356 } 9357 idx := v_0.Args[1] 9358 ptr := v_0.Args[0] 9359 mem := v_1 9360 if !(off == 0 && sym == nil) { 9361 break 9362 } 9363 v.reset(OpARM64MOVDloadidx) 9364 v.AddArg3(ptr, idx, mem) 9365 return true 9366 } 9367 // match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem) 9368 // cond: off == 0 && sym == nil 9369 // result: (MOVDloadidx8 ptr idx mem) 9370 for { 9371 off := auxIntToInt32(v.AuxInt) 9372 sym := auxToSym(v.Aux) 9373 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 { 9374 break 9375 } 9376 idx := v_0.Args[1] 9377 ptr := v_0.Args[0] 9378 mem := v_1 9379 if !(off == 0 && sym == nil) { 9380 break 9381 } 9382 v.reset(OpARM64MOVDloadidx8) 9383 v.AddArg3(ptr, idx, mem) 9384 return true 9385 } 9386 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 9387 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9388 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 9389 for { 9390 off1 := auxIntToInt32(v.AuxInt) 9391 sym1 := auxToSym(v.Aux) 9392 if v_0.Op != OpARM64MOVDaddr { 9393 break 9394 } 9395 off2 := auxIntToInt32(v_0.AuxInt) 9396 sym2 := auxToSym(v_0.Aux) 9397 ptr := v_0.Args[0] 9398 mem := v_1 9399 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9400 break 9401 } 9402 v.reset(OpARM64MOVDload) 9403 v.AuxInt = int32ToAuxInt(off1 + off2) 9404 v.Aux = symToAux(mergeSym(sym1, sym2)) 9405 v.AddArg2(ptr, mem) 9406 return true 9407 } 9408 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 9409 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 9410 // result: (MOVDconst [0]) 9411 for { 9412 off := 
auxIntToInt32(v.AuxInt) 9413 sym := auxToSym(v.Aux) 9414 ptr := v_0 9415 if v_1.Op != OpARM64MOVDstorezero { 9416 break 9417 } 9418 off2 := auxIntToInt32(v_1.AuxInt) 9419 sym2 := auxToSym(v_1.Aux) 9420 ptr2 := v_1.Args[0] 9421 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 9422 break 9423 } 9424 v.reset(OpARM64MOVDconst) 9425 v.AuxInt = int64ToAuxInt(0) 9426 return true 9427 } 9428 // match: (MOVDload [off] {sym} (SB) _) 9429 // cond: symIsRO(sym) 9430 // result: (MOVDconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))]) 9431 for { 9432 off := auxIntToInt32(v.AuxInt) 9433 sym := auxToSym(v.Aux) 9434 if v_0.Op != OpSB || !(symIsRO(sym)) { 9435 break 9436 } 9437 v.reset(OpARM64MOVDconst) 9438 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))) 9439 return true 9440 } 9441 return false 9442 } 9443 func rewriteValueARM64_OpARM64MOVDloadidx(v *Value) bool { 9444 v_2 := v.Args[2] 9445 v_1 := v.Args[1] 9446 v_0 := v.Args[0] 9447 // match: (MOVDloadidx ptr (MOVDconst [c]) mem) 9448 // cond: is32Bit(c) 9449 // result: (MOVDload [int32(c)] ptr mem) 9450 for { 9451 ptr := v_0 9452 if v_1.Op != OpARM64MOVDconst { 9453 break 9454 } 9455 c := auxIntToInt64(v_1.AuxInt) 9456 mem := v_2 9457 if !(is32Bit(c)) { 9458 break 9459 } 9460 v.reset(OpARM64MOVDload) 9461 v.AuxInt = int32ToAuxInt(int32(c)) 9462 v.AddArg2(ptr, mem) 9463 return true 9464 } 9465 // match: (MOVDloadidx (MOVDconst [c]) ptr mem) 9466 // cond: is32Bit(c) 9467 // result: (MOVDload [int32(c)] ptr mem) 9468 for { 9469 if v_0.Op != OpARM64MOVDconst { 9470 break 9471 } 9472 c := auxIntToInt64(v_0.AuxInt) 9473 ptr := v_1 9474 mem := v_2 9475 if !(is32Bit(c)) { 9476 break 9477 } 9478 v.reset(OpARM64MOVDload) 9479 v.AuxInt = int32ToAuxInt(int32(c)) 9480 v.AddArg2(ptr, mem) 9481 return true 9482 } 9483 // match: (MOVDloadidx ptr (SLLconst [3] idx) mem) 9484 // result: (MOVDloadidx8 ptr idx mem) 9485 for { 9486 ptr := v_0 9487 if v_1.Op != OpARM64SLLconst || 
auxIntToInt64(v_1.AuxInt) != 3 { 9488 break 9489 } 9490 idx := v_1.Args[0] 9491 mem := v_2 9492 v.reset(OpARM64MOVDloadidx8) 9493 v.AddArg3(ptr, idx, mem) 9494 return true 9495 } 9496 // match: (MOVDloadidx (SLLconst [3] idx) ptr mem) 9497 // result: (MOVDloadidx8 ptr idx mem) 9498 for { 9499 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 { 9500 break 9501 } 9502 idx := v_0.Args[0] 9503 ptr := v_1 9504 mem := v_2 9505 v.reset(OpARM64MOVDloadidx8) 9506 v.AddArg3(ptr, idx, mem) 9507 return true 9508 } 9509 // match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _)) 9510 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 9511 // result: (MOVDconst [0]) 9512 for { 9513 ptr := v_0 9514 idx := v_1 9515 if v_2.Op != OpARM64MOVDstorezeroidx { 9516 break 9517 } 9518 idx2 := v_2.Args[1] 9519 ptr2 := v_2.Args[0] 9520 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 9521 break 9522 } 9523 v.reset(OpARM64MOVDconst) 9524 v.AuxInt = int64ToAuxInt(0) 9525 return true 9526 } 9527 return false 9528 } 9529 func rewriteValueARM64_OpARM64MOVDloadidx8(v *Value) bool { 9530 v_2 := v.Args[2] 9531 v_1 := v.Args[1] 9532 v_0 := v.Args[0] 9533 // match: (MOVDloadidx8 ptr (MOVDconst [c]) mem) 9534 // cond: is32Bit(c<<3) 9535 // result: (MOVDload [int32(c)<<3] ptr mem) 9536 for { 9537 ptr := v_0 9538 if v_1.Op != OpARM64MOVDconst { 9539 break 9540 } 9541 c := auxIntToInt64(v_1.AuxInt) 9542 mem := v_2 9543 if !(is32Bit(c << 3)) { 9544 break 9545 } 9546 v.reset(OpARM64MOVDload) 9547 v.AuxInt = int32ToAuxInt(int32(c) << 3) 9548 v.AddArg2(ptr, mem) 9549 return true 9550 } 9551 // match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _)) 9552 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 9553 // result: (MOVDconst [0]) 9554 for { 9555 ptr := v_0 9556 idx := v_1 9557 if v_2.Op != OpARM64MOVDstorezeroidx8 { 9558 break 9559 } 9560 idx2 := v_2.Args[1] 9561 ptr2 := 
v_2.Args[0] 9562 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 9563 break 9564 } 9565 v.reset(OpARM64MOVDconst) 9566 v.AuxInt = int64ToAuxInt(0) 9567 return true 9568 } 9569 return false 9570 } 9571 func rewriteValueARM64_OpARM64MOVDnop(v *Value) bool { 9572 v_0 := v.Args[0] 9573 // match: (MOVDnop (MOVDconst [c])) 9574 // result: (MOVDconst [c]) 9575 for { 9576 if v_0.Op != OpARM64MOVDconst { 9577 break 9578 } 9579 c := auxIntToInt64(v_0.AuxInt) 9580 v.reset(OpARM64MOVDconst) 9581 v.AuxInt = int64ToAuxInt(c) 9582 return true 9583 } 9584 return false 9585 } 9586 func rewriteValueARM64_OpARM64MOVDreg(v *Value) bool { 9587 v_0 := v.Args[0] 9588 // match: (MOVDreg x) 9589 // cond: x.Uses == 1 9590 // result: (MOVDnop x) 9591 for { 9592 x := v_0 9593 if !(x.Uses == 1) { 9594 break 9595 } 9596 v.reset(OpARM64MOVDnop) 9597 v.AddArg(x) 9598 return true 9599 } 9600 // match: (MOVDreg (MOVDconst [c])) 9601 // result: (MOVDconst [c]) 9602 for { 9603 if v_0.Op != OpARM64MOVDconst { 9604 break 9605 } 9606 c := auxIntToInt64(v_0.AuxInt) 9607 v.reset(OpARM64MOVDconst) 9608 v.AuxInt = int64ToAuxInt(c) 9609 return true 9610 } 9611 return false 9612 } 9613 func rewriteValueARM64_OpARM64MOVDstore(v *Value) bool { 9614 v_2 := v.Args[2] 9615 v_1 := v.Args[1] 9616 v_0 := v.Args[0] 9617 b := v.Block 9618 config := b.Func.Config 9619 // match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem) 9620 // result: (FMOVDstore [off] {sym} ptr val mem) 9621 for { 9622 off := auxIntToInt32(v.AuxInt) 9623 sym := auxToSym(v.Aux) 9624 ptr := v_0 9625 if v_1.Op != OpARM64FMOVDfpgp { 9626 break 9627 } 9628 val := v_1.Args[0] 9629 mem := v_2 9630 v.reset(OpARM64FMOVDstore) 9631 v.AuxInt = int32ToAuxInt(off) 9632 v.Aux = symToAux(sym) 9633 v.AddArg3(ptr, val, mem) 9634 return true 9635 } 9636 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 9637 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9638 // result: (MOVDstore [off1+int32(off2)] {sym} ptr 
val mem) 9639 for { 9640 off1 := auxIntToInt32(v.AuxInt) 9641 sym := auxToSym(v.Aux) 9642 if v_0.Op != OpARM64ADDconst { 9643 break 9644 } 9645 off2 := auxIntToInt64(v_0.AuxInt) 9646 ptr := v_0.Args[0] 9647 val := v_1 9648 mem := v_2 9649 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9650 break 9651 } 9652 v.reset(OpARM64MOVDstore) 9653 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 9654 v.Aux = symToAux(sym) 9655 v.AddArg3(ptr, val, mem) 9656 return true 9657 } 9658 // match: (MOVDstore [off] {sym} (ADD ptr idx) val mem) 9659 // cond: off == 0 && sym == nil 9660 // result: (MOVDstoreidx ptr idx val mem) 9661 for { 9662 off := auxIntToInt32(v.AuxInt) 9663 sym := auxToSym(v.Aux) 9664 if v_0.Op != OpARM64ADD { 9665 break 9666 } 9667 idx := v_0.Args[1] 9668 ptr := v_0.Args[0] 9669 val := v_1 9670 mem := v_2 9671 if !(off == 0 && sym == nil) { 9672 break 9673 } 9674 v.reset(OpARM64MOVDstoreidx) 9675 v.AddArg4(ptr, idx, val, mem) 9676 return true 9677 } 9678 // match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem) 9679 // cond: off == 0 && sym == nil 9680 // result: (MOVDstoreidx8 ptr idx val mem) 9681 for { 9682 off := auxIntToInt32(v.AuxInt) 9683 sym := auxToSym(v.Aux) 9684 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 { 9685 break 9686 } 9687 idx := v_0.Args[1] 9688 ptr := v_0.Args[0] 9689 val := v_1 9690 mem := v_2 9691 if !(off == 0 && sym == nil) { 9692 break 9693 } 9694 v.reset(OpARM64MOVDstoreidx8) 9695 v.AddArg4(ptr, idx, val, mem) 9696 return true 9697 } 9698 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 9699 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9700 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 9701 for { 9702 off1 := auxIntToInt32(v.AuxInt) 9703 sym1 := auxToSym(v.Aux) 9704 if v_0.Op != OpARM64MOVDaddr { 9705 break 9706 } 9707 off2 := auxIntToInt32(v_0.AuxInt) 9708 
sym2 := auxToSym(v_0.Aux) 9709 ptr := v_0.Args[0] 9710 val := v_1 9711 mem := v_2 9712 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9713 break 9714 } 9715 v.reset(OpARM64MOVDstore) 9716 v.AuxInt = int32ToAuxInt(off1 + off2) 9717 v.Aux = symToAux(mergeSym(sym1, sym2)) 9718 v.AddArg3(ptr, val, mem) 9719 return true 9720 } 9721 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 9722 // result: (MOVDstorezero [off] {sym} ptr mem) 9723 for { 9724 off := auxIntToInt32(v.AuxInt) 9725 sym := auxToSym(v.Aux) 9726 ptr := v_0 9727 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 9728 break 9729 } 9730 mem := v_2 9731 v.reset(OpARM64MOVDstorezero) 9732 v.AuxInt = int32ToAuxInt(off) 9733 v.Aux = symToAux(sym) 9734 v.AddArg2(ptr, mem) 9735 return true 9736 } 9737 return false 9738 } 9739 func rewriteValueARM64_OpARM64MOVDstoreidx(v *Value) bool { 9740 v_3 := v.Args[3] 9741 v_2 := v.Args[2] 9742 v_1 := v.Args[1] 9743 v_0 := v.Args[0] 9744 // match: (MOVDstoreidx ptr (MOVDconst [c]) val mem) 9745 // cond: is32Bit(c) 9746 // result: (MOVDstore [int32(c)] ptr val mem) 9747 for { 9748 ptr := v_0 9749 if v_1.Op != OpARM64MOVDconst { 9750 break 9751 } 9752 c := auxIntToInt64(v_1.AuxInt) 9753 val := v_2 9754 mem := v_3 9755 if !(is32Bit(c)) { 9756 break 9757 } 9758 v.reset(OpARM64MOVDstore) 9759 v.AuxInt = int32ToAuxInt(int32(c)) 9760 v.AddArg3(ptr, val, mem) 9761 return true 9762 } 9763 // match: (MOVDstoreidx (MOVDconst [c]) idx val mem) 9764 // cond: is32Bit(c) 9765 // result: (MOVDstore [int32(c)] idx val mem) 9766 for { 9767 if v_0.Op != OpARM64MOVDconst { 9768 break 9769 } 9770 c := auxIntToInt64(v_0.AuxInt) 9771 idx := v_1 9772 val := v_2 9773 mem := v_3 9774 if !(is32Bit(c)) { 9775 break 9776 } 9777 v.reset(OpARM64MOVDstore) 9778 v.AuxInt = int32ToAuxInt(int32(c)) 9779 v.AddArg3(idx, val, mem) 9780 return true 9781 } 9782 // match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem) 9783 // 
result: (MOVDstoreidx8 ptr idx val mem) 9784 for { 9785 ptr := v_0 9786 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 { 9787 break 9788 } 9789 idx := v_1.Args[0] 9790 val := v_2 9791 mem := v_3 9792 v.reset(OpARM64MOVDstoreidx8) 9793 v.AddArg4(ptr, idx, val, mem) 9794 return true 9795 } 9796 // match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem) 9797 // result: (MOVDstoreidx8 ptr idx val mem) 9798 for { 9799 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 { 9800 break 9801 } 9802 idx := v_0.Args[0] 9803 ptr := v_1 9804 val := v_2 9805 mem := v_3 9806 v.reset(OpARM64MOVDstoreidx8) 9807 v.AddArg4(ptr, idx, val, mem) 9808 return true 9809 } 9810 // match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem) 9811 // result: (MOVDstorezeroidx ptr idx mem) 9812 for { 9813 ptr := v_0 9814 idx := v_1 9815 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 9816 break 9817 } 9818 mem := v_3 9819 v.reset(OpARM64MOVDstorezeroidx) 9820 v.AddArg3(ptr, idx, mem) 9821 return true 9822 } 9823 return false 9824 } 9825 func rewriteValueARM64_OpARM64MOVDstoreidx8(v *Value) bool { 9826 v_3 := v.Args[3] 9827 v_2 := v.Args[2] 9828 v_1 := v.Args[1] 9829 v_0 := v.Args[0] 9830 // match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem) 9831 // cond: is32Bit(c<<3) 9832 // result: (MOVDstore [int32(c)<<3] ptr val mem) 9833 for { 9834 ptr := v_0 9835 if v_1.Op != OpARM64MOVDconst { 9836 break 9837 } 9838 c := auxIntToInt64(v_1.AuxInt) 9839 val := v_2 9840 mem := v_3 9841 if !(is32Bit(c << 3)) { 9842 break 9843 } 9844 v.reset(OpARM64MOVDstore) 9845 v.AuxInt = int32ToAuxInt(int32(c) << 3) 9846 v.AddArg3(ptr, val, mem) 9847 return true 9848 } 9849 // match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem) 9850 // result: (MOVDstorezeroidx8 ptr idx mem) 9851 for { 9852 ptr := v_0 9853 idx := v_1 9854 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 9855 break 9856 } 9857 mem := v_3 9858 v.reset(OpARM64MOVDstorezeroidx8) 9859 v.AddArg3(ptr, idx, mem) 9860 
return true 9861 } 9862 return false 9863 } 9864 func rewriteValueARM64_OpARM64MOVDstorezero(v *Value) bool { 9865 v_1 := v.Args[1] 9866 v_0 := v.Args[0] 9867 b := v.Block 9868 config := b.Func.Config 9869 // match: (MOVDstorezero {s} [i] ptr x:(MOVDstorezero {s} [i+8] ptr mem)) 9870 // cond: x.Uses == 1 && setPos(v, x.Pos) && clobber(x) 9871 // result: (MOVQstorezero {s} [i] ptr mem) 9872 for { 9873 i := auxIntToInt32(v.AuxInt) 9874 s := auxToSym(v.Aux) 9875 ptr := v_0 9876 x := v_1 9877 if x.Op != OpARM64MOVDstorezero || auxIntToInt32(x.AuxInt) != i+8 || auxToSym(x.Aux) != s { 9878 break 9879 } 9880 mem := x.Args[1] 9881 if ptr != x.Args[0] || !(x.Uses == 1 && setPos(v, x.Pos) && clobber(x)) { 9882 break 9883 } 9884 v.reset(OpARM64MOVQstorezero) 9885 v.AuxInt = int32ToAuxInt(i) 9886 v.Aux = symToAux(s) 9887 v.AddArg2(ptr, mem) 9888 return true 9889 } 9890 // match: (MOVDstorezero {s} [i] ptr x:(MOVDstorezero {s} [i-8] ptr mem)) 9891 // cond: x.Uses == 1 && setPos(v, x.Pos) && clobber(x) 9892 // result: (MOVQstorezero {s} [i-8] ptr mem) 9893 for { 9894 i := auxIntToInt32(v.AuxInt) 9895 s := auxToSym(v.Aux) 9896 ptr := v_0 9897 x := v_1 9898 if x.Op != OpARM64MOVDstorezero || auxIntToInt32(x.AuxInt) != i-8 || auxToSym(x.Aux) != s { 9899 break 9900 } 9901 mem := x.Args[1] 9902 if ptr != x.Args[0] || !(x.Uses == 1 && setPos(v, x.Pos) && clobber(x)) { 9903 break 9904 } 9905 v.reset(OpARM64MOVQstorezero) 9906 v.AuxInt = int32ToAuxInt(i - 8) 9907 v.Aux = symToAux(s) 9908 v.AddArg2(ptr, mem) 9909 return true 9910 } 9911 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 9912 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9913 // result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem) 9914 for { 9915 off1 := auxIntToInt32(v.AuxInt) 9916 sym := auxToSym(v.Aux) 9917 if v_0.Op != OpARM64ADDconst { 9918 break 9919 } 9920 off2 := auxIntToInt64(v_0.AuxInt) 9921 ptr := v_0.Args[0] 9922 mem := v_1 9923 if 
!(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9924 break 9925 } 9926 v.reset(OpARM64MOVDstorezero) 9927 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 9928 v.Aux = symToAux(sym) 9929 v.AddArg2(ptr, mem) 9930 return true 9931 } 9932 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 9933 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 9934 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 9935 for { 9936 off1 := auxIntToInt32(v.AuxInt) 9937 sym1 := auxToSym(v.Aux) 9938 if v_0.Op != OpARM64MOVDaddr { 9939 break 9940 } 9941 off2 := auxIntToInt32(v_0.AuxInt) 9942 sym2 := auxToSym(v_0.Aux) 9943 ptr := v_0.Args[0] 9944 mem := v_1 9945 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 9946 break 9947 } 9948 v.reset(OpARM64MOVDstorezero) 9949 v.AuxInt = int32ToAuxInt(off1 + off2) 9950 v.Aux = symToAux(mergeSym(sym1, sym2)) 9951 v.AddArg2(ptr, mem) 9952 return true 9953 } 9954 // match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem) 9955 // cond: off == 0 && sym == nil 9956 // result: (MOVDstorezeroidx ptr idx mem) 9957 for { 9958 off := auxIntToInt32(v.AuxInt) 9959 sym := auxToSym(v.Aux) 9960 if v_0.Op != OpARM64ADD { 9961 break 9962 } 9963 idx := v_0.Args[1] 9964 ptr := v_0.Args[0] 9965 mem := v_1 9966 if !(off == 0 && sym == nil) { 9967 break 9968 } 9969 v.reset(OpARM64MOVDstorezeroidx) 9970 v.AddArg3(ptr, idx, mem) 9971 return true 9972 } 9973 // match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem) 9974 // cond: off == 0 && sym == nil 9975 // result: (MOVDstorezeroidx8 ptr idx mem) 9976 for { 9977 off := auxIntToInt32(v.AuxInt) 9978 sym := auxToSym(v.Aux) 9979 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 { 9980 break 9981 } 9982 idx := v_0.Args[1] 9983 ptr := v_0.Args[0] 9984 mem := v_1 9985 if !(off == 0 && sym == nil) { 9986 break 
9987 } 9988 v.reset(OpARM64MOVDstorezeroidx8) 9989 v.AddArg3(ptr, idx, mem) 9990 return true 9991 } 9992 return false 9993 } 9994 func rewriteValueARM64_OpARM64MOVDstorezeroidx(v *Value) bool { 9995 v_2 := v.Args[2] 9996 v_1 := v.Args[1] 9997 v_0 := v.Args[0] 9998 // match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem) 9999 // cond: is32Bit(c) 10000 // result: (MOVDstorezero [int32(c)] ptr mem) 10001 for { 10002 ptr := v_0 10003 if v_1.Op != OpARM64MOVDconst { 10004 break 10005 } 10006 c := auxIntToInt64(v_1.AuxInt) 10007 mem := v_2 10008 if !(is32Bit(c)) { 10009 break 10010 } 10011 v.reset(OpARM64MOVDstorezero) 10012 v.AuxInt = int32ToAuxInt(int32(c)) 10013 v.AddArg2(ptr, mem) 10014 return true 10015 } 10016 // match: (MOVDstorezeroidx (MOVDconst [c]) idx mem) 10017 // cond: is32Bit(c) 10018 // result: (MOVDstorezero [int32(c)] idx mem) 10019 for { 10020 if v_0.Op != OpARM64MOVDconst { 10021 break 10022 } 10023 c := auxIntToInt64(v_0.AuxInt) 10024 idx := v_1 10025 mem := v_2 10026 if !(is32Bit(c)) { 10027 break 10028 } 10029 v.reset(OpARM64MOVDstorezero) 10030 v.AuxInt = int32ToAuxInt(int32(c)) 10031 v.AddArg2(idx, mem) 10032 return true 10033 } 10034 // match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem) 10035 // result: (MOVDstorezeroidx8 ptr idx mem) 10036 for { 10037 ptr := v_0 10038 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 { 10039 break 10040 } 10041 idx := v_1.Args[0] 10042 mem := v_2 10043 v.reset(OpARM64MOVDstorezeroidx8) 10044 v.AddArg3(ptr, idx, mem) 10045 return true 10046 } 10047 // match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem) 10048 // result: (MOVDstorezeroidx8 ptr idx mem) 10049 for { 10050 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 { 10051 break 10052 } 10053 idx := v_0.Args[0] 10054 ptr := v_1 10055 mem := v_2 10056 v.reset(OpARM64MOVDstorezeroidx8) 10057 v.AddArg3(ptr, idx, mem) 10058 return true 10059 } 10060 return false 10061 } 10062 func rewriteValueARM64_OpARM64MOVDstorezeroidx8(v *Value) bool { 
10063 v_2 := v.Args[2] 10064 v_1 := v.Args[1] 10065 v_0 := v.Args[0] 10066 // match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem) 10067 // cond: is32Bit(c<<3) 10068 // result: (MOVDstorezero [int32(c<<3)] ptr mem) 10069 for { 10070 ptr := v_0 10071 if v_1.Op != OpARM64MOVDconst { 10072 break 10073 } 10074 c := auxIntToInt64(v_1.AuxInt) 10075 mem := v_2 10076 if !(is32Bit(c << 3)) { 10077 break 10078 } 10079 v.reset(OpARM64MOVDstorezero) 10080 v.AuxInt = int32ToAuxInt(int32(c << 3)) 10081 v.AddArg2(ptr, mem) 10082 return true 10083 } 10084 return false 10085 } 10086 func rewriteValueARM64_OpARM64MOVHUload(v *Value) bool { 10087 v_1 := v.Args[1] 10088 v_0 := v.Args[0] 10089 b := v.Block 10090 config := b.Func.Config 10091 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 10092 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 10093 // result: (MOVHUload [off1+int32(off2)] {sym} ptr mem) 10094 for { 10095 off1 := auxIntToInt32(v.AuxInt) 10096 sym := auxToSym(v.Aux) 10097 if v_0.Op != OpARM64ADDconst { 10098 break 10099 } 10100 off2 := auxIntToInt64(v_0.AuxInt) 10101 ptr := v_0.Args[0] 10102 mem := v_1 10103 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 10104 break 10105 } 10106 v.reset(OpARM64MOVHUload) 10107 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 10108 v.Aux = symToAux(sym) 10109 v.AddArg2(ptr, mem) 10110 return true 10111 } 10112 // match: (MOVHUload [off] {sym} (ADD ptr idx) mem) 10113 // cond: off == 0 && sym == nil 10114 // result: (MOVHUloadidx ptr idx mem) 10115 for { 10116 off := auxIntToInt32(v.AuxInt) 10117 sym := auxToSym(v.Aux) 10118 if v_0.Op != OpARM64ADD { 10119 break 10120 } 10121 idx := v_0.Args[1] 10122 ptr := v_0.Args[0] 10123 mem := v_1 10124 if !(off == 0 && sym == nil) { 10125 break 10126 } 10127 v.reset(OpARM64MOVHUloadidx) 10128 v.AddArg3(ptr, idx, mem) 10129 return true 10130 } 10131 // match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem) 10132 // 
cond: off == 0 && sym == nil
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUloadidx applies the generated rewrite rules
// for MOVHUloadidx values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (ADD idx idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	// (ADD idx idx) computes idx*2, so this is the 2-scaled indexed form.
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx (ADD idx idx) ptr mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUloadidx2 applies the generated rewrite rules
// for MOVHUloadidx2 values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHUloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHUload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUreg applies the generated rewrite rules for
// MOVHUreg (zero-extend halfword) values and reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x)
	// cond: v.Type.Size() <= 2
	// result: x
	for {
		x := v_0
		if !(v.Type.Size() <= 2) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc >= 16
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (UBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SRLconst [rc] x))
	// cond: rc < 16
	// result: (UBFX [armBFAuxInt(rc, 16)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHload applies the generated rewrite rules for
// MOVHload (sign-extending halfword load) values and reports whether v was
// rewritten in place.
func rewriteValueARM64_OpARM64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHloadidx applies the generated rewrite rules
// for MOVHloadidx values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr (ADD idx idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx (ADD idx idx) ptr mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHloadidx2 applies the generated rewrite rules
// for MOVHloadidx2 values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHreg applies the generated rewrite rules for
// MOVHreg (sign-extend halfword) values and reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x)
	// cond: v.Type.Size() <= 2
	// result: x
	for {
		x := v_0
		if !(v.Type.Size() <= 2) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg <t> (ANDconst x [c]))
	// cond: uint64(c) & uint64(0xffffffffffff8000) == 0
	// result: (ANDconst <t> x [c])
	for {
		t := v.Type
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(uint64(c)&uint64(0xffffffffffff8000) == 0) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (SBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstore applies the generated rewrite rules for
// MOVHstore values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	// A halfword store only writes the low 16 bits, so the extension is dead.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstoreidx applies the generated rewrite rules
// for MOVHstoreidx values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (ADD idx idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (ADD idx idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstoreidx2 applies the generated rewrite rules
// for MOVHstoreidx2 values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHstoreidx2(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstore [int32(c)<<1] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstorezero applies the generated rewrite rules
// for MOVHstorezero values and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstorezeroidx applies the generated rewrite
// rules for MOVHstorezeroidx values and reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx ptr (ADD idx idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx (ADD idx idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstorezeroidx2 applies the generated rewrite
// rules for MOVHstorezeroidx2 values and reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHstorezeroidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstorezero [int32(c<<1)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 1))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
11640 func rewriteValueARM64_OpARM64MOVQstorezero(v *Value) bool { 11641 v_1 := v.Args[1] 11642 v_0 := v.Args[0] 11643 b := v.Block 11644 config := b.Func.Config 11645 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 11646 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 11647 // result: (MOVQstorezero [off1+int32(off2)] {sym} ptr mem) 11648 for { 11649 off1 := auxIntToInt32(v.AuxInt) 11650 sym := auxToSym(v.Aux) 11651 if v_0.Op != OpARM64ADDconst { 11652 break 11653 } 11654 off2 := auxIntToInt64(v_0.AuxInt) 11655 ptr := v_0.Args[0] 11656 mem := v_1 11657 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 11658 break 11659 } 11660 v.reset(OpARM64MOVQstorezero) 11661 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 11662 v.Aux = symToAux(sym) 11663 v.AddArg2(ptr, mem) 11664 return true 11665 } 11666 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 11667 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 11668 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 11669 for { 11670 off1 := auxIntToInt32(v.AuxInt) 11671 sym1 := auxToSym(v.Aux) 11672 if v_0.Op != OpARM64MOVDaddr { 11673 break 11674 } 11675 off2 := auxIntToInt32(v_0.AuxInt) 11676 sym2 := auxToSym(v_0.Aux) 11677 ptr := v_0.Args[0] 11678 mem := v_1 11679 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 11680 break 11681 } 11682 v.reset(OpARM64MOVQstorezero) 11683 v.AuxInt = int32ToAuxInt(off1 + off2) 11684 v.Aux = symToAux(mergeSym(sym1, sym2)) 11685 v.AddArg2(ptr, mem) 11686 return true 11687 } 11688 return false 11689 } 11690 func rewriteValueARM64_OpARM64MOVWUload(v *Value) bool { 11691 v_1 := v.Args[1] 11692 v_0 := v.Args[0] 11693 b := v.Block 11694 config := b.Func.Config 11695 // match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _)) 
11696 // result: (FMOVSfpgp val) 11697 for { 11698 off := auxIntToInt32(v.AuxInt) 11699 sym := auxToSym(v.Aux) 11700 ptr := v_0 11701 if v_1.Op != OpARM64FMOVSstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym { 11702 break 11703 } 11704 val := v_1.Args[1] 11705 if ptr != v_1.Args[0] { 11706 break 11707 } 11708 v.reset(OpARM64FMOVSfpgp) 11709 v.AddArg(val) 11710 return true 11711 } 11712 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 11713 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 11714 // result: (MOVWUload [off1+int32(off2)] {sym} ptr mem) 11715 for { 11716 off1 := auxIntToInt32(v.AuxInt) 11717 sym := auxToSym(v.Aux) 11718 if v_0.Op != OpARM64ADDconst { 11719 break 11720 } 11721 off2 := auxIntToInt64(v_0.AuxInt) 11722 ptr := v_0.Args[0] 11723 mem := v_1 11724 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 11725 break 11726 } 11727 v.reset(OpARM64MOVWUload) 11728 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 11729 v.Aux = symToAux(sym) 11730 v.AddArg2(ptr, mem) 11731 return true 11732 } 11733 // match: (MOVWUload [off] {sym} (ADD ptr idx) mem) 11734 // cond: off == 0 && sym == nil 11735 // result: (MOVWUloadidx ptr idx mem) 11736 for { 11737 off := auxIntToInt32(v.AuxInt) 11738 sym := auxToSym(v.Aux) 11739 if v_0.Op != OpARM64ADD { 11740 break 11741 } 11742 idx := v_0.Args[1] 11743 ptr := v_0.Args[0] 11744 mem := v_1 11745 if !(off == 0 && sym == nil) { 11746 break 11747 } 11748 v.reset(OpARM64MOVWUloadidx) 11749 v.AddArg3(ptr, idx, mem) 11750 return true 11751 } 11752 // match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 11753 // cond: off == 0 && sym == nil 11754 // result: (MOVWUloadidx4 ptr idx mem) 11755 for { 11756 off := auxIntToInt32(v.AuxInt) 11757 sym := auxToSym(v.Aux) 11758 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 { 11759 break 11760 } 11761 idx := v_0.Args[1] 11762 ptr := v_0.Args[0] 11763 mem := v_1 11764 if 
!(off == 0 && sym == nil) { 11765 break 11766 } 11767 v.reset(OpARM64MOVWUloadidx4) 11768 v.AddArg3(ptr, idx, mem) 11769 return true 11770 } 11771 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 11772 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 11773 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 11774 for { 11775 off1 := auxIntToInt32(v.AuxInt) 11776 sym1 := auxToSym(v.Aux) 11777 if v_0.Op != OpARM64MOVDaddr { 11778 break 11779 } 11780 off2 := auxIntToInt32(v_0.AuxInt) 11781 sym2 := auxToSym(v_0.Aux) 11782 ptr := v_0.Args[0] 11783 mem := v_1 11784 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 11785 break 11786 } 11787 v.reset(OpARM64MOVWUload) 11788 v.AuxInt = int32ToAuxInt(off1 + off2) 11789 v.Aux = symToAux(mergeSym(sym1, sym2)) 11790 v.AddArg2(ptr, mem) 11791 return true 11792 } 11793 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 11794 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 11795 // result: (MOVDconst [0]) 11796 for { 11797 off := auxIntToInt32(v.AuxInt) 11798 sym := auxToSym(v.Aux) 11799 ptr := v_0 11800 if v_1.Op != OpARM64MOVWstorezero { 11801 break 11802 } 11803 off2 := auxIntToInt32(v_1.AuxInt) 11804 sym2 := auxToSym(v_1.Aux) 11805 ptr2 := v_1.Args[0] 11806 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 11807 break 11808 } 11809 v.reset(OpARM64MOVDconst) 11810 v.AuxInt = int64ToAuxInt(0) 11811 return true 11812 } 11813 // match: (MOVWUload [off] {sym} (SB) _) 11814 // cond: symIsRO(sym) 11815 // result: (MOVDconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))]) 11816 for { 11817 off := auxIntToInt32(v.AuxInt) 11818 sym := auxToSym(v.Aux) 11819 if v_0.Op != OpSB || !(symIsRO(sym)) { 11820 break 11821 } 11822 v.reset(OpARM64MOVDconst) 11823 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), 
config.ctxt.Arch.ByteOrder))) 11824 return true 11825 } 11826 return false 11827 } 11828 func rewriteValueARM64_OpARM64MOVWUloadidx(v *Value) bool { 11829 v_2 := v.Args[2] 11830 v_1 := v.Args[1] 11831 v_0 := v.Args[0] 11832 // match: (MOVWUloadidx ptr (MOVDconst [c]) mem) 11833 // cond: is32Bit(c) 11834 // result: (MOVWUload [int32(c)] ptr mem) 11835 for { 11836 ptr := v_0 11837 if v_1.Op != OpARM64MOVDconst { 11838 break 11839 } 11840 c := auxIntToInt64(v_1.AuxInt) 11841 mem := v_2 11842 if !(is32Bit(c)) { 11843 break 11844 } 11845 v.reset(OpARM64MOVWUload) 11846 v.AuxInt = int32ToAuxInt(int32(c)) 11847 v.AddArg2(ptr, mem) 11848 return true 11849 } 11850 // match: (MOVWUloadidx (MOVDconst [c]) ptr mem) 11851 // cond: is32Bit(c) 11852 // result: (MOVWUload [int32(c)] ptr mem) 11853 for { 11854 if v_0.Op != OpARM64MOVDconst { 11855 break 11856 } 11857 c := auxIntToInt64(v_0.AuxInt) 11858 ptr := v_1 11859 mem := v_2 11860 if !(is32Bit(c)) { 11861 break 11862 } 11863 v.reset(OpARM64MOVWUload) 11864 v.AuxInt = int32ToAuxInt(int32(c)) 11865 v.AddArg2(ptr, mem) 11866 return true 11867 } 11868 // match: (MOVWUloadidx ptr (SLLconst [2] idx) mem) 11869 // result: (MOVWUloadidx4 ptr idx mem) 11870 for { 11871 ptr := v_0 11872 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 { 11873 break 11874 } 11875 idx := v_1.Args[0] 11876 mem := v_2 11877 v.reset(OpARM64MOVWUloadidx4) 11878 v.AddArg3(ptr, idx, mem) 11879 return true 11880 } 11881 // match: (MOVWUloadidx (SLLconst [2] idx) ptr mem) 11882 // result: (MOVWUloadidx4 ptr idx mem) 11883 for { 11884 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 { 11885 break 11886 } 11887 idx := v_0.Args[0] 11888 ptr := v_1 11889 mem := v_2 11890 v.reset(OpARM64MOVWUloadidx4) 11891 v.AddArg3(ptr, idx, mem) 11892 return true 11893 } 11894 // match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 11895 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 11896 
// result: (MOVDconst [0]) 11897 for { 11898 ptr := v_0 11899 idx := v_1 11900 if v_2.Op != OpARM64MOVWstorezeroidx { 11901 break 11902 } 11903 idx2 := v_2.Args[1] 11904 ptr2 := v_2.Args[0] 11905 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 11906 break 11907 } 11908 v.reset(OpARM64MOVDconst) 11909 v.AuxInt = int64ToAuxInt(0) 11910 return true 11911 } 11912 return false 11913 } 11914 func rewriteValueARM64_OpARM64MOVWUloadidx4(v *Value) bool { 11915 v_2 := v.Args[2] 11916 v_1 := v.Args[1] 11917 v_0 := v.Args[0] 11918 // match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem) 11919 // cond: is32Bit(c<<2) 11920 // result: (MOVWUload [int32(c)<<2] ptr mem) 11921 for { 11922 ptr := v_0 11923 if v_1.Op != OpARM64MOVDconst { 11924 break 11925 } 11926 c := auxIntToInt64(v_1.AuxInt) 11927 mem := v_2 11928 if !(is32Bit(c << 2)) { 11929 break 11930 } 11931 v.reset(OpARM64MOVWUload) 11932 v.AuxInt = int32ToAuxInt(int32(c) << 2) 11933 v.AddArg2(ptr, mem) 11934 return true 11935 } 11936 // match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 11937 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 11938 // result: (MOVDconst [0]) 11939 for { 11940 ptr := v_0 11941 idx := v_1 11942 if v_2.Op != OpARM64MOVWstorezeroidx4 { 11943 break 11944 } 11945 idx2 := v_2.Args[1] 11946 ptr2 := v_2.Args[0] 11947 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 11948 break 11949 } 11950 v.reset(OpARM64MOVDconst) 11951 v.AuxInt = int64ToAuxInt(0) 11952 return true 11953 } 11954 return false 11955 } 11956 func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool { 11957 v_0 := v.Args[0] 11958 // match: (MOVWUreg x:(MOVBUload _ _)) 11959 // result: (MOVDreg x) 11960 for { 11961 x := v_0 11962 if x.Op != OpARM64MOVBUload { 11963 break 11964 } 11965 v.reset(OpARM64MOVDreg) 11966 v.AddArg(x) 11967 return true 11968 } 11969 // match: (MOVWUreg x:(MOVHUload _ _)) 11970 // result: (MOVDreg x) 11971 for { 11972 x := v_0 11973 if x.Op != 
OpARM64MOVHUload { 11974 break 11975 } 11976 v.reset(OpARM64MOVDreg) 11977 v.AddArg(x) 11978 return true 11979 } 11980 // match: (MOVWUreg x:(MOVWUload _ _)) 11981 // result: (MOVDreg x) 11982 for { 11983 x := v_0 11984 if x.Op != OpARM64MOVWUload { 11985 break 11986 } 11987 v.reset(OpARM64MOVDreg) 11988 v.AddArg(x) 11989 return true 11990 } 11991 // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) 11992 // result: (MOVDreg x) 11993 for { 11994 x := v_0 11995 if x.Op != OpARM64MOVBUloadidx { 11996 break 11997 } 11998 v.reset(OpARM64MOVDreg) 11999 v.AddArg(x) 12000 return true 12001 } 12002 // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) 12003 // result: (MOVDreg x) 12004 for { 12005 x := v_0 12006 if x.Op != OpARM64MOVHUloadidx { 12007 break 12008 } 12009 v.reset(OpARM64MOVDreg) 12010 v.AddArg(x) 12011 return true 12012 } 12013 // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) 12014 // result: (MOVDreg x) 12015 for { 12016 x := v_0 12017 if x.Op != OpARM64MOVWUloadidx { 12018 break 12019 } 12020 v.reset(OpARM64MOVDreg) 12021 v.AddArg(x) 12022 return true 12023 } 12024 // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) 12025 // result: (MOVDreg x) 12026 for { 12027 x := v_0 12028 if x.Op != OpARM64MOVHUloadidx2 { 12029 break 12030 } 12031 v.reset(OpARM64MOVDreg) 12032 v.AddArg(x) 12033 return true 12034 } 12035 // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) 12036 // result: (MOVDreg x) 12037 for { 12038 x := v_0 12039 if x.Op != OpARM64MOVWUloadidx4 { 12040 break 12041 } 12042 v.reset(OpARM64MOVDreg) 12043 v.AddArg(x) 12044 return true 12045 } 12046 // match: (MOVWUreg x:(MOVBUreg _)) 12047 // result: (MOVDreg x) 12048 for { 12049 x := v_0 12050 if x.Op != OpARM64MOVBUreg { 12051 break 12052 } 12053 v.reset(OpARM64MOVDreg) 12054 v.AddArg(x) 12055 return true 12056 } 12057 // match: (MOVWUreg x:(MOVHUreg _)) 12058 // result: (MOVDreg x) 12059 for { 12060 x := v_0 12061 if x.Op != OpARM64MOVHUreg { 12062 break 12063 } 12064 v.reset(OpARM64MOVDreg) 12065 v.AddArg(x) 12066 return true 12067 } 
12068 // match: (MOVWUreg x:(MOVWUreg _)) 12069 // result: (MOVDreg x) 12070 for { 12071 x := v_0 12072 if x.Op != OpARM64MOVWUreg { 12073 break 12074 } 12075 v.reset(OpARM64MOVDreg) 12076 v.AddArg(x) 12077 return true 12078 } 12079 // match: (MOVWUreg (ANDconst [c] x)) 12080 // result: (ANDconst [c&(1<<32-1)] x) 12081 for { 12082 if v_0.Op != OpARM64ANDconst { 12083 break 12084 } 12085 c := auxIntToInt64(v_0.AuxInt) 12086 x := v_0.Args[0] 12087 v.reset(OpARM64ANDconst) 12088 v.AuxInt = int64ToAuxInt(c & (1<<32 - 1)) 12089 v.AddArg(x) 12090 return true 12091 } 12092 // match: (MOVWUreg (MOVDconst [c])) 12093 // result: (MOVDconst [int64(uint32(c))]) 12094 for { 12095 if v_0.Op != OpARM64MOVDconst { 12096 break 12097 } 12098 c := auxIntToInt64(v_0.AuxInt) 12099 v.reset(OpARM64MOVDconst) 12100 v.AuxInt = int64ToAuxInt(int64(uint32(c))) 12101 return true 12102 } 12103 // match: (MOVWUreg x) 12104 // cond: v.Type.Size() <= 4 12105 // result: x 12106 for { 12107 x := v_0 12108 if !(v.Type.Size() <= 4) { 12109 break 12110 } 12111 v.copyOf(x) 12112 return true 12113 } 12114 // match: (MOVWUreg x) 12115 // cond: zeroUpper32Bits(x, 3) 12116 // result: x 12117 for { 12118 x := v_0 12119 if !(zeroUpper32Bits(x, 3)) { 12120 break 12121 } 12122 v.copyOf(x) 12123 return true 12124 } 12125 // match: (MOVWUreg (SLLconst [lc] x)) 12126 // cond: lc >= 32 12127 // result: (MOVDconst [0]) 12128 for { 12129 if v_0.Op != OpARM64SLLconst { 12130 break 12131 } 12132 lc := auxIntToInt64(v_0.AuxInt) 12133 if !(lc >= 32) { 12134 break 12135 } 12136 v.reset(OpARM64MOVDconst) 12137 v.AuxInt = int64ToAuxInt(0) 12138 return true 12139 } 12140 // match: (MOVWUreg (SLLconst [lc] x)) 12141 // cond: lc < 32 12142 // result: (UBFIZ [armBFAuxInt(lc, 32-lc)] x) 12143 for { 12144 if v_0.Op != OpARM64SLLconst { 12145 break 12146 } 12147 lc := auxIntToInt64(v_0.AuxInt) 12148 x := v_0.Args[0] 12149 if !(lc < 32) { 12150 break 12151 } 12152 v.reset(OpARM64UBFIZ) 12153 v.AuxInt = 
arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc)) 12154 v.AddArg(x) 12155 return true 12156 } 12157 // match: (MOVWUreg (SRLconst [rc] x)) 12158 // cond: rc < 32 12159 // result: (UBFX [armBFAuxInt(rc, 32)] x) 12160 for { 12161 if v_0.Op != OpARM64SRLconst { 12162 break 12163 } 12164 rc := auxIntToInt64(v_0.AuxInt) 12165 x := v_0.Args[0] 12166 if !(rc < 32) { 12167 break 12168 } 12169 v.reset(OpARM64UBFX) 12170 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32)) 12171 v.AddArg(x) 12172 return true 12173 } 12174 // match: (MOVWUreg (UBFX [bfc] x)) 12175 // cond: bfc.getARM64BFwidth() <= 32 12176 // result: (UBFX [bfc] x) 12177 for { 12178 if v_0.Op != OpARM64UBFX { 12179 break 12180 } 12181 bfc := auxIntToArm64BitField(v_0.AuxInt) 12182 x := v_0.Args[0] 12183 if !(bfc.getARM64BFwidth() <= 32) { 12184 break 12185 } 12186 v.reset(OpARM64UBFX) 12187 v.AuxInt = arm64BitFieldToAuxInt(bfc) 12188 v.AddArg(x) 12189 return true 12190 } 12191 return false 12192 } 12193 func rewriteValueARM64_OpARM64MOVWload(v *Value) bool { 12194 v_1 := v.Args[1] 12195 v_0 := v.Args[0] 12196 b := v.Block 12197 config := b.Func.Config 12198 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 12199 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 12200 // result: (MOVWload [off1+int32(off2)] {sym} ptr mem) 12201 for { 12202 off1 := auxIntToInt32(v.AuxInt) 12203 sym := auxToSym(v.Aux) 12204 if v_0.Op != OpARM64ADDconst { 12205 break 12206 } 12207 off2 := auxIntToInt64(v_0.AuxInt) 12208 ptr := v_0.Args[0] 12209 mem := v_1 12210 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 12211 break 12212 } 12213 v.reset(OpARM64MOVWload) 12214 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 12215 v.Aux = symToAux(sym) 12216 v.AddArg2(ptr, mem) 12217 return true 12218 } 12219 // match: (MOVWload [off] {sym} (ADD ptr idx) mem) 12220 // cond: off == 0 && sym == nil 12221 // result: (MOVWloadidx ptr idx mem) 12222 for { 12223 off := 
auxIntToInt32(v.AuxInt) 12224 sym := auxToSym(v.Aux) 12225 if v_0.Op != OpARM64ADD { 12226 break 12227 } 12228 idx := v_0.Args[1] 12229 ptr := v_0.Args[0] 12230 mem := v_1 12231 if !(off == 0 && sym == nil) { 12232 break 12233 } 12234 v.reset(OpARM64MOVWloadidx) 12235 v.AddArg3(ptr, idx, mem) 12236 return true 12237 } 12238 // match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 12239 // cond: off == 0 && sym == nil 12240 // result: (MOVWloadidx4 ptr idx mem) 12241 for { 12242 off := auxIntToInt32(v.AuxInt) 12243 sym := auxToSym(v.Aux) 12244 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 { 12245 break 12246 } 12247 idx := v_0.Args[1] 12248 ptr := v_0.Args[0] 12249 mem := v_1 12250 if !(off == 0 && sym == nil) { 12251 break 12252 } 12253 v.reset(OpARM64MOVWloadidx4) 12254 v.AddArg3(ptr, idx, mem) 12255 return true 12256 } 12257 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12258 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 12259 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12260 for { 12261 off1 := auxIntToInt32(v.AuxInt) 12262 sym1 := auxToSym(v.Aux) 12263 if v_0.Op != OpARM64MOVDaddr { 12264 break 12265 } 12266 off2 := auxIntToInt32(v_0.AuxInt) 12267 sym2 := auxToSym(v_0.Aux) 12268 ptr := v_0.Args[0] 12269 mem := v_1 12270 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 12271 break 12272 } 12273 v.reset(OpARM64MOVWload) 12274 v.AuxInt = int32ToAuxInt(off1 + off2) 12275 v.Aux = symToAux(mergeSym(sym1, sym2)) 12276 v.AddArg2(ptr, mem) 12277 return true 12278 } 12279 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 12280 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 12281 // result: (MOVDconst [0]) 12282 for { 12283 off := auxIntToInt32(v.AuxInt) 12284 sym := auxToSym(v.Aux) 12285 ptr := v_0 12286 if v_1.Op != 
OpARM64MOVWstorezero { 12287 break 12288 } 12289 off2 := auxIntToInt32(v_1.AuxInt) 12290 sym2 := auxToSym(v_1.Aux) 12291 ptr2 := v_1.Args[0] 12292 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 12293 break 12294 } 12295 v.reset(OpARM64MOVDconst) 12296 v.AuxInt = int64ToAuxInt(0) 12297 return true 12298 } 12299 return false 12300 } 12301 func rewriteValueARM64_OpARM64MOVWloadidx(v *Value) bool { 12302 v_2 := v.Args[2] 12303 v_1 := v.Args[1] 12304 v_0 := v.Args[0] 12305 // match: (MOVWloadidx ptr (MOVDconst [c]) mem) 12306 // cond: is32Bit(c) 12307 // result: (MOVWload [int32(c)] ptr mem) 12308 for { 12309 ptr := v_0 12310 if v_1.Op != OpARM64MOVDconst { 12311 break 12312 } 12313 c := auxIntToInt64(v_1.AuxInt) 12314 mem := v_2 12315 if !(is32Bit(c)) { 12316 break 12317 } 12318 v.reset(OpARM64MOVWload) 12319 v.AuxInt = int32ToAuxInt(int32(c)) 12320 v.AddArg2(ptr, mem) 12321 return true 12322 } 12323 // match: (MOVWloadidx (MOVDconst [c]) ptr mem) 12324 // cond: is32Bit(c) 12325 // result: (MOVWload [int32(c)] ptr mem) 12326 for { 12327 if v_0.Op != OpARM64MOVDconst { 12328 break 12329 } 12330 c := auxIntToInt64(v_0.AuxInt) 12331 ptr := v_1 12332 mem := v_2 12333 if !(is32Bit(c)) { 12334 break 12335 } 12336 v.reset(OpARM64MOVWload) 12337 v.AuxInt = int32ToAuxInt(int32(c)) 12338 v.AddArg2(ptr, mem) 12339 return true 12340 } 12341 // match: (MOVWloadidx ptr (SLLconst [2] idx) mem) 12342 // result: (MOVWloadidx4 ptr idx mem) 12343 for { 12344 ptr := v_0 12345 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 { 12346 break 12347 } 12348 idx := v_1.Args[0] 12349 mem := v_2 12350 v.reset(OpARM64MOVWloadidx4) 12351 v.AddArg3(ptr, idx, mem) 12352 return true 12353 } 12354 // match: (MOVWloadidx (SLLconst [2] idx) ptr mem) 12355 // result: (MOVWloadidx4 ptr idx mem) 12356 for { 12357 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 { 12358 break 12359 } 12360 idx := v_0.Args[0] 12361 ptr := v_1 12362 mem := v_2 12363 
v.reset(OpARM64MOVWloadidx4) 12364 v.AddArg3(ptr, idx, mem) 12365 return true 12366 } 12367 // match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 12368 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 12369 // result: (MOVDconst [0]) 12370 for { 12371 ptr := v_0 12372 idx := v_1 12373 if v_2.Op != OpARM64MOVWstorezeroidx { 12374 break 12375 } 12376 idx2 := v_2.Args[1] 12377 ptr2 := v_2.Args[0] 12378 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 12379 break 12380 } 12381 v.reset(OpARM64MOVDconst) 12382 v.AuxInt = int64ToAuxInt(0) 12383 return true 12384 } 12385 return false 12386 } 12387 func rewriteValueARM64_OpARM64MOVWloadidx4(v *Value) bool { 12388 v_2 := v.Args[2] 12389 v_1 := v.Args[1] 12390 v_0 := v.Args[0] 12391 // match: (MOVWloadidx4 ptr (MOVDconst [c]) mem) 12392 // cond: is32Bit(c<<2) 12393 // result: (MOVWload [int32(c)<<2] ptr mem) 12394 for { 12395 ptr := v_0 12396 if v_1.Op != OpARM64MOVDconst { 12397 break 12398 } 12399 c := auxIntToInt64(v_1.AuxInt) 12400 mem := v_2 12401 if !(is32Bit(c << 2)) { 12402 break 12403 } 12404 v.reset(OpARM64MOVWload) 12405 v.AuxInt = int32ToAuxInt(int32(c) << 2) 12406 v.AddArg2(ptr, mem) 12407 return true 12408 } 12409 // match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 12410 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 12411 // result: (MOVDconst [0]) 12412 for { 12413 ptr := v_0 12414 idx := v_1 12415 if v_2.Op != OpARM64MOVWstorezeroidx4 { 12416 break 12417 } 12418 idx2 := v_2.Args[1] 12419 ptr2 := v_2.Args[0] 12420 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 12421 break 12422 } 12423 v.reset(OpARM64MOVDconst) 12424 v.AuxInt = int64ToAuxInt(0) 12425 return true 12426 } 12427 return false 12428 } 12429 func rewriteValueARM64_OpARM64MOVWreg(v *Value) bool { 12430 v_0 := v.Args[0] 12431 // match: (MOVWreg x:(MOVBload _ _)) 12432 // result: (MOVDreg x) 12433 for { 12434 x 
:= v_0 12435 if x.Op != OpARM64MOVBload { 12436 break 12437 } 12438 v.reset(OpARM64MOVDreg) 12439 v.AddArg(x) 12440 return true 12441 } 12442 // match: (MOVWreg x:(MOVBUload _ _)) 12443 // result: (MOVDreg x) 12444 for { 12445 x := v_0 12446 if x.Op != OpARM64MOVBUload { 12447 break 12448 } 12449 v.reset(OpARM64MOVDreg) 12450 v.AddArg(x) 12451 return true 12452 } 12453 // match: (MOVWreg x:(MOVHload _ _)) 12454 // result: (MOVDreg x) 12455 for { 12456 x := v_0 12457 if x.Op != OpARM64MOVHload { 12458 break 12459 } 12460 v.reset(OpARM64MOVDreg) 12461 v.AddArg(x) 12462 return true 12463 } 12464 // match: (MOVWreg x:(MOVHUload _ _)) 12465 // result: (MOVDreg x) 12466 for { 12467 x := v_0 12468 if x.Op != OpARM64MOVHUload { 12469 break 12470 } 12471 v.reset(OpARM64MOVDreg) 12472 v.AddArg(x) 12473 return true 12474 } 12475 // match: (MOVWreg x:(MOVWload _ _)) 12476 // result: (MOVDreg x) 12477 for { 12478 x := v_0 12479 if x.Op != OpARM64MOVWload { 12480 break 12481 } 12482 v.reset(OpARM64MOVDreg) 12483 v.AddArg(x) 12484 return true 12485 } 12486 // match: (MOVWreg x:(MOVBloadidx _ _ _)) 12487 // result: (MOVDreg x) 12488 for { 12489 x := v_0 12490 if x.Op != OpARM64MOVBloadidx { 12491 break 12492 } 12493 v.reset(OpARM64MOVDreg) 12494 v.AddArg(x) 12495 return true 12496 } 12497 // match: (MOVWreg x:(MOVBUloadidx _ _ _)) 12498 // result: (MOVDreg x) 12499 for { 12500 x := v_0 12501 if x.Op != OpARM64MOVBUloadidx { 12502 break 12503 } 12504 v.reset(OpARM64MOVDreg) 12505 v.AddArg(x) 12506 return true 12507 } 12508 // match: (MOVWreg x:(MOVHloadidx _ _ _)) 12509 // result: (MOVDreg x) 12510 for { 12511 x := v_0 12512 if x.Op != OpARM64MOVHloadidx { 12513 break 12514 } 12515 v.reset(OpARM64MOVDreg) 12516 v.AddArg(x) 12517 return true 12518 } 12519 // match: (MOVWreg x:(MOVHUloadidx _ _ _)) 12520 // result: (MOVDreg x) 12521 for { 12522 x := v_0 12523 if x.Op != OpARM64MOVHUloadidx { 12524 break 12525 } 12526 v.reset(OpARM64MOVDreg) 12527 v.AddArg(x) 12528 return true 12529 } 
12530 // match: (MOVWreg x:(MOVWloadidx _ _ _)) 12531 // result: (MOVDreg x) 12532 for { 12533 x := v_0 12534 if x.Op != OpARM64MOVWloadidx { 12535 break 12536 } 12537 v.reset(OpARM64MOVDreg) 12538 v.AddArg(x) 12539 return true 12540 } 12541 // match: (MOVWreg x:(MOVHloadidx2 _ _ _)) 12542 // result: (MOVDreg x) 12543 for { 12544 x := v_0 12545 if x.Op != OpARM64MOVHloadidx2 { 12546 break 12547 } 12548 v.reset(OpARM64MOVDreg) 12549 v.AddArg(x) 12550 return true 12551 } 12552 // match: (MOVWreg x:(MOVHUloadidx2 _ _ _)) 12553 // result: (MOVDreg x) 12554 for { 12555 x := v_0 12556 if x.Op != OpARM64MOVHUloadidx2 { 12557 break 12558 } 12559 v.reset(OpARM64MOVDreg) 12560 v.AddArg(x) 12561 return true 12562 } 12563 // match: (MOVWreg x:(MOVWloadidx4 _ _ _)) 12564 // result: (MOVDreg x) 12565 for { 12566 x := v_0 12567 if x.Op != OpARM64MOVWloadidx4 { 12568 break 12569 } 12570 v.reset(OpARM64MOVDreg) 12571 v.AddArg(x) 12572 return true 12573 } 12574 // match: (MOVWreg x:(MOVBreg _)) 12575 // result: (MOVDreg x) 12576 for { 12577 x := v_0 12578 if x.Op != OpARM64MOVBreg { 12579 break 12580 } 12581 v.reset(OpARM64MOVDreg) 12582 v.AddArg(x) 12583 return true 12584 } 12585 // match: (MOVWreg x:(MOVBUreg _)) 12586 // result: (MOVDreg x) 12587 for { 12588 x := v_0 12589 if x.Op != OpARM64MOVBUreg { 12590 break 12591 } 12592 v.reset(OpARM64MOVDreg) 12593 v.AddArg(x) 12594 return true 12595 } 12596 // match: (MOVWreg x:(MOVHreg _)) 12597 // result: (MOVDreg x) 12598 for { 12599 x := v_0 12600 if x.Op != OpARM64MOVHreg { 12601 break 12602 } 12603 v.reset(OpARM64MOVDreg) 12604 v.AddArg(x) 12605 return true 12606 } 12607 // match: (MOVWreg x:(MOVWreg _)) 12608 // result: (MOVDreg x) 12609 for { 12610 x := v_0 12611 if x.Op != OpARM64MOVWreg { 12612 break 12613 } 12614 v.reset(OpARM64MOVDreg) 12615 v.AddArg(x) 12616 return true 12617 } 12618 // match: (MOVWreg (MOVDconst [c])) 12619 // result: (MOVDconst [int64(int32(c))]) 12620 for { 12621 if v_0.Op != OpARM64MOVDconst { 12622 
break 12623 } 12624 c := auxIntToInt64(v_0.AuxInt) 12625 v.reset(OpARM64MOVDconst) 12626 v.AuxInt = int64ToAuxInt(int64(int32(c))) 12627 return true 12628 } 12629 // match: (MOVWreg x) 12630 // cond: v.Type.Size() <= 4 12631 // result: x 12632 for { 12633 x := v_0 12634 if !(v.Type.Size() <= 4) { 12635 break 12636 } 12637 v.copyOf(x) 12638 return true 12639 } 12640 // match: (MOVWreg <t> (ANDconst x [c])) 12641 // cond: uint64(c) & uint64(0xffffffff80000000) == 0 12642 // result: (ANDconst <t> x [c]) 12643 for { 12644 t := v.Type 12645 if v_0.Op != OpARM64ANDconst { 12646 break 12647 } 12648 c := auxIntToInt64(v_0.AuxInt) 12649 x := v_0.Args[0] 12650 if !(uint64(c)&uint64(0xffffffff80000000) == 0) { 12651 break 12652 } 12653 v.reset(OpARM64ANDconst) 12654 v.Type = t 12655 v.AuxInt = int64ToAuxInt(c) 12656 v.AddArg(x) 12657 return true 12658 } 12659 // match: (MOVWreg (SLLconst [lc] x)) 12660 // cond: lc < 32 12661 // result: (SBFIZ [armBFAuxInt(lc, 32-lc)] x) 12662 for { 12663 if v_0.Op != OpARM64SLLconst { 12664 break 12665 } 12666 lc := auxIntToInt64(v_0.AuxInt) 12667 x := v_0.Args[0] 12668 if !(lc < 32) { 12669 break 12670 } 12671 v.reset(OpARM64SBFIZ) 12672 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc)) 12673 v.AddArg(x) 12674 return true 12675 } 12676 // match: (MOVWreg (SBFX [bfc] x)) 12677 // cond: bfc.getARM64BFwidth() <= 32 12678 // result: (SBFX [bfc] x) 12679 for { 12680 if v_0.Op != OpARM64SBFX { 12681 break 12682 } 12683 bfc := auxIntToArm64BitField(v_0.AuxInt) 12684 x := v_0.Args[0] 12685 if !(bfc.getARM64BFwidth() <= 32) { 12686 break 12687 } 12688 v.reset(OpARM64SBFX) 12689 v.AuxInt = arm64BitFieldToAuxInt(bfc) 12690 v.AddArg(x) 12691 return true 12692 } 12693 return false 12694 } 12695 func rewriteValueARM64_OpARM64MOVWstore(v *Value) bool { 12696 v_2 := v.Args[2] 12697 v_1 := v.Args[1] 12698 v_0 := v.Args[0] 12699 b := v.Block 12700 config := b.Func.Config 12701 // match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem) 12702 // result: 
(FMOVSstore [off] {sym} ptr val mem) 12703 for { 12704 off := auxIntToInt32(v.AuxInt) 12705 sym := auxToSym(v.Aux) 12706 ptr := v_0 12707 if v_1.Op != OpARM64FMOVSfpgp { 12708 break 12709 } 12710 val := v_1.Args[0] 12711 mem := v_2 12712 v.reset(OpARM64FMOVSstore) 12713 v.AuxInt = int32ToAuxInt(off) 12714 v.Aux = symToAux(sym) 12715 v.AddArg3(ptr, val, mem) 12716 return true 12717 } 12718 // match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem) 12719 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 12720 // result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem) 12721 for { 12722 off1 := auxIntToInt32(v.AuxInt) 12723 sym := auxToSym(v.Aux) 12724 if v_0.Op != OpARM64ADDconst { 12725 break 12726 } 12727 off2 := auxIntToInt64(v_0.AuxInt) 12728 ptr := v_0.Args[0] 12729 val := v_1 12730 mem := v_2 12731 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 12732 break 12733 } 12734 v.reset(OpARM64MOVWstore) 12735 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 12736 v.Aux = symToAux(sym) 12737 v.AddArg3(ptr, val, mem) 12738 return true 12739 } 12740 // match: (MOVWstore [off] {sym} (ADD ptr idx) val mem) 12741 // cond: off == 0 && sym == nil 12742 // result: (MOVWstoreidx ptr idx val mem) 12743 for { 12744 off := auxIntToInt32(v.AuxInt) 12745 sym := auxToSym(v.Aux) 12746 if v_0.Op != OpARM64ADD { 12747 break 12748 } 12749 idx := v_0.Args[1] 12750 ptr := v_0.Args[0] 12751 val := v_1 12752 mem := v_2 12753 if !(off == 0 && sym == nil) { 12754 break 12755 } 12756 v.reset(OpARM64MOVWstoreidx) 12757 v.AddArg4(ptr, idx, val, mem) 12758 return true 12759 } 12760 // match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem) 12761 // cond: off == 0 && sym == nil 12762 // result: (MOVWstoreidx4 ptr idx val mem) 12763 for { 12764 off := auxIntToInt32(v.AuxInt) 12765 sym := auxToSym(v.Aux) 12766 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 { 12767 break 12768 } 12769 idx := v_0.Args[1] 
12770 ptr := v_0.Args[0] 12771 val := v_1 12772 mem := v_2 12773 if !(off == 0 && sym == nil) { 12774 break 12775 } 12776 v.reset(OpARM64MOVWstoreidx4) 12777 v.AddArg4(ptr, idx, val, mem) 12778 return true 12779 } 12780 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 12781 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 12782 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 12783 for { 12784 off1 := auxIntToInt32(v.AuxInt) 12785 sym1 := auxToSym(v.Aux) 12786 if v_0.Op != OpARM64MOVDaddr { 12787 break 12788 } 12789 off2 := auxIntToInt32(v_0.AuxInt) 12790 sym2 := auxToSym(v_0.Aux) 12791 ptr := v_0.Args[0] 12792 val := v_1 12793 mem := v_2 12794 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 12795 break 12796 } 12797 v.reset(OpARM64MOVWstore) 12798 v.AuxInt = int32ToAuxInt(off1 + off2) 12799 v.Aux = symToAux(mergeSym(sym1, sym2)) 12800 v.AddArg3(ptr, val, mem) 12801 return true 12802 } 12803 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 12804 // result: (MOVWstorezero [off] {sym} ptr mem) 12805 for { 12806 off := auxIntToInt32(v.AuxInt) 12807 sym := auxToSym(v.Aux) 12808 ptr := v_0 12809 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 12810 break 12811 } 12812 mem := v_2 12813 v.reset(OpARM64MOVWstorezero) 12814 v.AuxInt = int32ToAuxInt(off) 12815 v.Aux = symToAux(sym) 12816 v.AddArg2(ptr, mem) 12817 return true 12818 } 12819 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 12820 // result: (MOVWstore [off] {sym} ptr x mem) 12821 for { 12822 off := auxIntToInt32(v.AuxInt) 12823 sym := auxToSym(v.Aux) 12824 ptr := v_0 12825 if v_1.Op != OpARM64MOVWreg { 12826 break 12827 } 12828 x := v_1.Args[0] 12829 mem := v_2 12830 v.reset(OpARM64MOVWstore) 12831 v.AuxInt = int32ToAuxInt(off) 12832 v.Aux = symToAux(sym) 12833 v.AddArg3(ptr, x, mem) 12834 return true 
12835 } 12836 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 12837 // result: (MOVWstore [off] {sym} ptr x mem) 12838 for { 12839 off := auxIntToInt32(v.AuxInt) 12840 sym := auxToSym(v.Aux) 12841 ptr := v_0 12842 if v_1.Op != OpARM64MOVWUreg { 12843 break 12844 } 12845 x := v_1.Args[0] 12846 mem := v_2 12847 v.reset(OpARM64MOVWstore) 12848 v.AuxInt = int32ToAuxInt(off) 12849 v.Aux = symToAux(sym) 12850 v.AddArg3(ptr, x, mem) 12851 return true 12852 } 12853 return false 12854 } 12855 func rewriteValueARM64_OpARM64MOVWstoreidx(v *Value) bool { 12856 v_3 := v.Args[3] 12857 v_2 := v.Args[2] 12858 v_1 := v.Args[1] 12859 v_0 := v.Args[0] 12860 // match: (MOVWstoreidx ptr (MOVDconst [c]) val mem) 12861 // cond: is32Bit(c) 12862 // result: (MOVWstore [int32(c)] ptr val mem) 12863 for { 12864 ptr := v_0 12865 if v_1.Op != OpARM64MOVDconst { 12866 break 12867 } 12868 c := auxIntToInt64(v_1.AuxInt) 12869 val := v_2 12870 mem := v_3 12871 if !(is32Bit(c)) { 12872 break 12873 } 12874 v.reset(OpARM64MOVWstore) 12875 v.AuxInt = int32ToAuxInt(int32(c)) 12876 v.AddArg3(ptr, val, mem) 12877 return true 12878 } 12879 // match: (MOVWstoreidx (MOVDconst [c]) idx val mem) 12880 // cond: is32Bit(c) 12881 // result: (MOVWstore [int32(c)] idx val mem) 12882 for { 12883 if v_0.Op != OpARM64MOVDconst { 12884 break 12885 } 12886 c := auxIntToInt64(v_0.AuxInt) 12887 idx := v_1 12888 val := v_2 12889 mem := v_3 12890 if !(is32Bit(c)) { 12891 break 12892 } 12893 v.reset(OpARM64MOVWstore) 12894 v.AuxInt = int32ToAuxInt(int32(c)) 12895 v.AddArg3(idx, val, mem) 12896 return true 12897 } 12898 // match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem) 12899 // result: (MOVWstoreidx4 ptr idx val mem) 12900 for { 12901 ptr := v_0 12902 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 { 12903 break 12904 } 12905 idx := v_1.Args[0] 12906 val := v_2 12907 mem := v_3 12908 v.reset(OpARM64MOVWstoreidx4) 12909 v.AddArg4(ptr, idx, val, mem) 12910 return true 12911 } 12912 // match: 
(MOVWstoreidx (SLLconst [2] idx) ptr val mem) 12913 // result: (MOVWstoreidx4 ptr idx val mem) 12914 for { 12915 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 { 12916 break 12917 } 12918 idx := v_0.Args[0] 12919 ptr := v_1 12920 val := v_2 12921 mem := v_3 12922 v.reset(OpARM64MOVWstoreidx4) 12923 v.AddArg4(ptr, idx, val, mem) 12924 return true 12925 } 12926 // match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem) 12927 // result: (MOVWstorezeroidx ptr idx mem) 12928 for { 12929 ptr := v_0 12930 idx := v_1 12931 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 12932 break 12933 } 12934 mem := v_3 12935 v.reset(OpARM64MOVWstorezeroidx) 12936 v.AddArg3(ptr, idx, mem) 12937 return true 12938 } 12939 // match: (MOVWstoreidx ptr idx (MOVWreg x) mem) 12940 // result: (MOVWstoreidx ptr idx x mem) 12941 for { 12942 ptr := v_0 12943 idx := v_1 12944 if v_2.Op != OpARM64MOVWreg { 12945 break 12946 } 12947 x := v_2.Args[0] 12948 mem := v_3 12949 v.reset(OpARM64MOVWstoreidx) 12950 v.AddArg4(ptr, idx, x, mem) 12951 return true 12952 } 12953 // match: (MOVWstoreidx ptr idx (MOVWUreg x) mem) 12954 // result: (MOVWstoreidx ptr idx x mem) 12955 for { 12956 ptr := v_0 12957 idx := v_1 12958 if v_2.Op != OpARM64MOVWUreg { 12959 break 12960 } 12961 x := v_2.Args[0] 12962 mem := v_3 12963 v.reset(OpARM64MOVWstoreidx) 12964 v.AddArg4(ptr, idx, x, mem) 12965 return true 12966 } 12967 return false 12968 } 12969 func rewriteValueARM64_OpARM64MOVWstoreidx4(v *Value) bool { 12970 v_3 := v.Args[3] 12971 v_2 := v.Args[2] 12972 v_1 := v.Args[1] 12973 v_0 := v.Args[0] 12974 // match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem) 12975 // cond: is32Bit(c<<2) 12976 // result: (MOVWstore [int32(c)<<2] ptr val mem) 12977 for { 12978 ptr := v_0 12979 if v_1.Op != OpARM64MOVDconst { 12980 break 12981 } 12982 c := auxIntToInt64(v_1.AuxInt) 12983 val := v_2 12984 mem := v_3 12985 if !(is32Bit(c << 2)) { 12986 break 12987 } 12988 v.reset(OpARM64MOVWstore) 12989 v.AuxInt = 
int32ToAuxInt(int32(c) << 2) 12990 v.AddArg3(ptr, val, mem) 12991 return true 12992 } 12993 // match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem) 12994 // result: (MOVWstorezeroidx4 ptr idx mem) 12995 for { 12996 ptr := v_0 12997 idx := v_1 12998 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 12999 break 13000 } 13001 mem := v_3 13002 v.reset(OpARM64MOVWstorezeroidx4) 13003 v.AddArg3(ptr, idx, mem) 13004 return true 13005 } 13006 // match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem) 13007 // result: (MOVWstoreidx4 ptr idx x mem) 13008 for { 13009 ptr := v_0 13010 idx := v_1 13011 if v_2.Op != OpARM64MOVWreg { 13012 break 13013 } 13014 x := v_2.Args[0] 13015 mem := v_3 13016 v.reset(OpARM64MOVWstoreidx4) 13017 v.AddArg4(ptr, idx, x, mem) 13018 return true 13019 } 13020 // match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem) 13021 // result: (MOVWstoreidx4 ptr idx x mem) 13022 for { 13023 ptr := v_0 13024 idx := v_1 13025 if v_2.Op != OpARM64MOVWUreg { 13026 break 13027 } 13028 x := v_2.Args[0] 13029 mem := v_3 13030 v.reset(OpARM64MOVWstoreidx4) 13031 v.AddArg4(ptr, idx, x, mem) 13032 return true 13033 } 13034 return false 13035 } 13036 func rewriteValueARM64_OpARM64MOVWstorezero(v *Value) bool { 13037 v_1 := v.Args[1] 13038 v_0 := v.Args[0] 13039 b := v.Block 13040 config := b.Func.Config 13041 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 13042 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 13043 // result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem) 13044 for { 13045 off1 := auxIntToInt32(v.AuxInt) 13046 sym := auxToSym(v.Aux) 13047 if v_0.Op != OpARM64ADDconst { 13048 break 13049 } 13050 off2 := auxIntToInt64(v_0.AuxInt) 13051 ptr := v_0.Args[0] 13052 mem := v_1 13053 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 13054 break 13055 } 13056 v.reset(OpARM64MOVWstorezero) 13057 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 13058 v.Aux = symToAux(sym) 13059 
v.AddArg2(ptr, mem) 13060 return true 13061 } 13062 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 13063 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 13064 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 13065 for { 13066 off1 := auxIntToInt32(v.AuxInt) 13067 sym1 := auxToSym(v.Aux) 13068 if v_0.Op != OpARM64MOVDaddr { 13069 break 13070 } 13071 off2 := auxIntToInt32(v_0.AuxInt) 13072 sym2 := auxToSym(v_0.Aux) 13073 ptr := v_0.Args[0] 13074 mem := v_1 13075 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 13076 break 13077 } 13078 v.reset(OpARM64MOVWstorezero) 13079 v.AuxInt = int32ToAuxInt(off1 + off2) 13080 v.Aux = symToAux(mergeSym(sym1, sym2)) 13081 v.AddArg2(ptr, mem) 13082 return true 13083 } 13084 // match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem) 13085 // cond: off == 0 && sym == nil 13086 // result: (MOVWstorezeroidx ptr idx mem) 13087 for { 13088 off := auxIntToInt32(v.AuxInt) 13089 sym := auxToSym(v.Aux) 13090 if v_0.Op != OpARM64ADD { 13091 break 13092 } 13093 idx := v_0.Args[1] 13094 ptr := v_0.Args[0] 13095 mem := v_1 13096 if !(off == 0 && sym == nil) { 13097 break 13098 } 13099 v.reset(OpARM64MOVWstorezeroidx) 13100 v.AddArg3(ptr, idx, mem) 13101 return true 13102 } 13103 // match: (MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem) 13104 // cond: off == 0 && sym == nil 13105 // result: (MOVWstorezeroidx4 ptr idx mem) 13106 for { 13107 off := auxIntToInt32(v.AuxInt) 13108 sym := auxToSym(v.Aux) 13109 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 { 13110 break 13111 } 13112 idx := v_0.Args[1] 13113 ptr := v_0.Args[0] 13114 mem := v_1 13115 if !(off == 0 && sym == nil) { 13116 break 13117 } 13118 v.reset(OpARM64MOVWstorezeroidx4) 13119 v.AddArg3(ptr, idx, mem) 13120 return true 13121 } 13122 return false 13123 } 13124 func 
rewriteValueARM64_OpARM64MOVWstorezeroidx(v *Value) bool { 13125 v_2 := v.Args[2] 13126 v_1 := v.Args[1] 13127 v_0 := v.Args[0] 13128 // match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem) 13129 // cond: is32Bit(c) 13130 // result: (MOVWstorezero [int32(c)] ptr mem) 13131 for { 13132 ptr := v_0 13133 if v_1.Op != OpARM64MOVDconst { 13134 break 13135 } 13136 c := auxIntToInt64(v_1.AuxInt) 13137 mem := v_2 13138 if !(is32Bit(c)) { 13139 break 13140 } 13141 v.reset(OpARM64MOVWstorezero) 13142 v.AuxInt = int32ToAuxInt(int32(c)) 13143 v.AddArg2(ptr, mem) 13144 return true 13145 } 13146 // match: (MOVWstorezeroidx (MOVDconst [c]) idx mem) 13147 // cond: is32Bit(c) 13148 // result: (MOVWstorezero [int32(c)] idx mem) 13149 for { 13150 if v_0.Op != OpARM64MOVDconst { 13151 break 13152 } 13153 c := auxIntToInt64(v_0.AuxInt) 13154 idx := v_1 13155 mem := v_2 13156 if !(is32Bit(c)) { 13157 break 13158 } 13159 v.reset(OpARM64MOVWstorezero) 13160 v.AuxInt = int32ToAuxInt(int32(c)) 13161 v.AddArg2(idx, mem) 13162 return true 13163 } 13164 // match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem) 13165 // result: (MOVWstorezeroidx4 ptr idx mem) 13166 for { 13167 ptr := v_0 13168 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 { 13169 break 13170 } 13171 idx := v_1.Args[0] 13172 mem := v_2 13173 v.reset(OpARM64MOVWstorezeroidx4) 13174 v.AddArg3(ptr, idx, mem) 13175 return true 13176 } 13177 // match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem) 13178 // result: (MOVWstorezeroidx4 ptr idx mem) 13179 for { 13180 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 { 13181 break 13182 } 13183 idx := v_0.Args[0] 13184 ptr := v_1 13185 mem := v_2 13186 v.reset(OpARM64MOVWstorezeroidx4) 13187 v.AddArg3(ptr, idx, mem) 13188 return true 13189 } 13190 return false 13191 } 13192 func rewriteValueARM64_OpARM64MOVWstorezeroidx4(v *Value) bool { 13193 v_2 := v.Args[2] 13194 v_1 := v.Args[1] 13195 v_0 := v.Args[0] 13196 // match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem) 
13197 // cond: is32Bit(c<<2) 13198 // result: (MOVWstorezero [int32(c<<2)] ptr mem) 13199 for { 13200 ptr := v_0 13201 if v_1.Op != OpARM64MOVDconst { 13202 break 13203 } 13204 c := auxIntToInt64(v_1.AuxInt) 13205 mem := v_2 13206 if !(is32Bit(c << 2)) { 13207 break 13208 } 13209 v.reset(OpARM64MOVWstorezero) 13210 v.AuxInt = int32ToAuxInt(int32(c << 2)) 13211 v.AddArg2(ptr, mem) 13212 return true 13213 } 13214 return false 13215 } 13216 func rewriteValueARM64_OpARM64MSUB(v *Value) bool { 13217 v_2 := v.Args[2] 13218 v_1 := v.Args[1] 13219 v_0 := v.Args[0] 13220 b := v.Block 13221 // match: (MSUB a x (MOVDconst [-1])) 13222 // result: (ADD a x) 13223 for { 13224 a := v_0 13225 x := v_1 13226 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 { 13227 break 13228 } 13229 v.reset(OpARM64ADD) 13230 v.AddArg2(a, x) 13231 return true 13232 } 13233 // match: (MSUB a _ (MOVDconst [0])) 13234 // result: a 13235 for { 13236 a := v_0 13237 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 13238 break 13239 } 13240 v.copyOf(a) 13241 return true 13242 } 13243 // match: (MSUB a x (MOVDconst [1])) 13244 // result: (SUB a x) 13245 for { 13246 a := v_0 13247 x := v_1 13248 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 { 13249 break 13250 } 13251 v.reset(OpARM64SUB) 13252 v.AddArg2(a, x) 13253 return true 13254 } 13255 // match: (MSUB a x (MOVDconst [c])) 13256 // cond: isPowerOfTwo64(c) 13257 // result: (SUBshiftLL a x [log64(c)]) 13258 for { 13259 a := v_0 13260 x := v_1 13261 if v_2.Op != OpARM64MOVDconst { 13262 break 13263 } 13264 c := auxIntToInt64(v_2.AuxInt) 13265 if !(isPowerOfTwo64(c)) { 13266 break 13267 } 13268 v.reset(OpARM64SUBshiftLL) 13269 v.AuxInt = int64ToAuxInt(log64(c)) 13270 v.AddArg2(a, x) 13271 return true 13272 } 13273 // match: (MSUB a x (MOVDconst [c])) 13274 // cond: isPowerOfTwo64(c-1) && c>=3 13275 // result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)])) 13276 for { 13277 a := v_0 13278 x := v_1 13279 if 
v_2.Op != OpARM64MOVDconst { 13280 break 13281 } 13282 c := auxIntToInt64(v_2.AuxInt) 13283 if !(isPowerOfTwo64(c-1) && c >= 3) { 13284 break 13285 } 13286 v.reset(OpARM64SUB) 13287 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13288 v0.AuxInt = int64ToAuxInt(log64(c - 1)) 13289 v0.AddArg2(x, x) 13290 v.AddArg2(a, v0) 13291 return true 13292 } 13293 // match: (MSUB a x (MOVDconst [c])) 13294 // cond: isPowerOfTwo64(c+1) && c>=7 13295 // result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)])) 13296 for { 13297 a := v_0 13298 x := v_1 13299 if v_2.Op != OpARM64MOVDconst { 13300 break 13301 } 13302 c := auxIntToInt64(v_2.AuxInt) 13303 if !(isPowerOfTwo64(c+1) && c >= 7) { 13304 break 13305 } 13306 v.reset(OpARM64ADD) 13307 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13308 v0.AuxInt = int64ToAuxInt(log64(c + 1)) 13309 v0.AddArg2(x, x) 13310 v.AddArg2(a, v0) 13311 return true 13312 } 13313 // match: (MSUB a x (MOVDconst [c])) 13314 // cond: c%3 == 0 && isPowerOfTwo64(c/3) 13315 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]) 13316 for { 13317 a := v_0 13318 x := v_1 13319 if v_2.Op != OpARM64MOVDconst { 13320 break 13321 } 13322 c := auxIntToInt64(v_2.AuxInt) 13323 if !(c%3 == 0 && isPowerOfTwo64(c/3)) { 13324 break 13325 } 13326 v.reset(OpARM64ADDshiftLL) 13327 v.AuxInt = int64ToAuxInt(log64(c / 3)) 13328 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13329 v0.AuxInt = int64ToAuxInt(2) 13330 v0.AddArg2(x, x) 13331 v.AddArg2(a, v0) 13332 return true 13333 } 13334 // match: (MSUB a x (MOVDconst [c])) 13335 // cond: c%5 == 0 && isPowerOfTwo64(c/5) 13336 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]) 13337 for { 13338 a := v_0 13339 x := v_1 13340 if v_2.Op != OpARM64MOVDconst { 13341 break 13342 } 13343 c := auxIntToInt64(v_2.AuxInt) 13344 if !(c%5 == 0 && isPowerOfTwo64(c/5)) { 13345 break 13346 } 13347 v.reset(OpARM64SUBshiftLL) 13348 v.AuxInt = int64ToAuxInt(log64(c / 5)) 13349 v0 := b.NewValue0(v.Pos, 
OpARM64ADDshiftLL, x.Type) 13350 v0.AuxInt = int64ToAuxInt(2) 13351 v0.AddArg2(x, x) 13352 v.AddArg2(a, v0) 13353 return true 13354 } 13355 // match: (MSUB a x (MOVDconst [c])) 13356 // cond: c%7 == 0 && isPowerOfTwo64(c/7) 13357 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]) 13358 for { 13359 a := v_0 13360 x := v_1 13361 if v_2.Op != OpARM64MOVDconst { 13362 break 13363 } 13364 c := auxIntToInt64(v_2.AuxInt) 13365 if !(c%7 == 0 && isPowerOfTwo64(c/7)) { 13366 break 13367 } 13368 v.reset(OpARM64ADDshiftLL) 13369 v.AuxInt = int64ToAuxInt(log64(c / 7)) 13370 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13371 v0.AuxInt = int64ToAuxInt(3) 13372 v0.AddArg2(x, x) 13373 v.AddArg2(a, v0) 13374 return true 13375 } 13376 // match: (MSUB a x (MOVDconst [c])) 13377 // cond: c%9 == 0 && isPowerOfTwo64(c/9) 13378 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]) 13379 for { 13380 a := v_0 13381 x := v_1 13382 if v_2.Op != OpARM64MOVDconst { 13383 break 13384 } 13385 c := auxIntToInt64(v_2.AuxInt) 13386 if !(c%9 == 0 && isPowerOfTwo64(c/9)) { 13387 break 13388 } 13389 v.reset(OpARM64SUBshiftLL) 13390 v.AuxInt = int64ToAuxInt(log64(c / 9)) 13391 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13392 v0.AuxInt = int64ToAuxInt(3) 13393 v0.AddArg2(x, x) 13394 v.AddArg2(a, v0) 13395 return true 13396 } 13397 // match: (MSUB a (MOVDconst [-1]) x) 13398 // result: (ADD a x) 13399 for { 13400 a := v_0 13401 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 { 13402 break 13403 } 13404 x := v_2 13405 v.reset(OpARM64ADD) 13406 v.AddArg2(a, x) 13407 return true 13408 } 13409 // match: (MSUB a (MOVDconst [0]) _) 13410 // result: a 13411 for { 13412 a := v_0 13413 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 13414 break 13415 } 13416 v.copyOf(a) 13417 return true 13418 } 13419 // match: (MSUB a (MOVDconst [1]) x) 13420 // result: (SUB a x) 13421 for { 13422 a := v_0 13423 if v_1.Op != OpARM64MOVDconst || 
auxIntToInt64(v_1.AuxInt) != 1 { 13424 break 13425 } 13426 x := v_2 13427 v.reset(OpARM64SUB) 13428 v.AddArg2(a, x) 13429 return true 13430 } 13431 // match: (MSUB a (MOVDconst [c]) x) 13432 // cond: isPowerOfTwo64(c) 13433 // result: (SUBshiftLL a x [log64(c)]) 13434 for { 13435 a := v_0 13436 if v_1.Op != OpARM64MOVDconst { 13437 break 13438 } 13439 c := auxIntToInt64(v_1.AuxInt) 13440 x := v_2 13441 if !(isPowerOfTwo64(c)) { 13442 break 13443 } 13444 v.reset(OpARM64SUBshiftLL) 13445 v.AuxInt = int64ToAuxInt(log64(c)) 13446 v.AddArg2(a, x) 13447 return true 13448 } 13449 // match: (MSUB a (MOVDconst [c]) x) 13450 // cond: isPowerOfTwo64(c-1) && c>=3 13451 // result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)])) 13452 for { 13453 a := v_0 13454 if v_1.Op != OpARM64MOVDconst { 13455 break 13456 } 13457 c := auxIntToInt64(v_1.AuxInt) 13458 x := v_2 13459 if !(isPowerOfTwo64(c-1) && c >= 3) { 13460 break 13461 } 13462 v.reset(OpARM64SUB) 13463 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13464 v0.AuxInt = int64ToAuxInt(log64(c - 1)) 13465 v0.AddArg2(x, x) 13466 v.AddArg2(a, v0) 13467 return true 13468 } 13469 // match: (MSUB a (MOVDconst [c]) x) 13470 // cond: isPowerOfTwo64(c+1) && c>=7 13471 // result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)])) 13472 for { 13473 a := v_0 13474 if v_1.Op != OpARM64MOVDconst { 13475 break 13476 } 13477 c := auxIntToInt64(v_1.AuxInt) 13478 x := v_2 13479 if !(isPowerOfTwo64(c+1) && c >= 7) { 13480 break 13481 } 13482 v.reset(OpARM64ADD) 13483 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13484 v0.AuxInt = int64ToAuxInt(log64(c + 1)) 13485 v0.AddArg2(x, x) 13486 v.AddArg2(a, v0) 13487 return true 13488 } 13489 // match: (MSUB a (MOVDconst [c]) x) 13490 // cond: c%3 == 0 && isPowerOfTwo64(c/3) 13491 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]) 13492 for { 13493 a := v_0 13494 if v_1.Op != OpARM64MOVDconst { 13495 break 13496 } 13497 c := auxIntToInt64(v_1.AuxInt) 13498 x := v_2 13499 if !(c%3 == 
0 && isPowerOfTwo64(c/3)) { 13500 break 13501 } 13502 v.reset(OpARM64ADDshiftLL) 13503 v.AuxInt = int64ToAuxInt(log64(c / 3)) 13504 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13505 v0.AuxInt = int64ToAuxInt(2) 13506 v0.AddArg2(x, x) 13507 v.AddArg2(a, v0) 13508 return true 13509 } 13510 // match: (MSUB a (MOVDconst [c]) x) 13511 // cond: c%5 == 0 && isPowerOfTwo64(c/5) 13512 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]) 13513 for { 13514 a := v_0 13515 if v_1.Op != OpARM64MOVDconst { 13516 break 13517 } 13518 c := auxIntToInt64(v_1.AuxInt) 13519 x := v_2 13520 if !(c%5 == 0 && isPowerOfTwo64(c/5)) { 13521 break 13522 } 13523 v.reset(OpARM64SUBshiftLL) 13524 v.AuxInt = int64ToAuxInt(log64(c / 5)) 13525 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13526 v0.AuxInt = int64ToAuxInt(2) 13527 v0.AddArg2(x, x) 13528 v.AddArg2(a, v0) 13529 return true 13530 } 13531 // match: (MSUB a (MOVDconst [c]) x) 13532 // cond: c%7 == 0 && isPowerOfTwo64(c/7) 13533 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]) 13534 for { 13535 a := v_0 13536 if v_1.Op != OpARM64MOVDconst { 13537 break 13538 } 13539 c := auxIntToInt64(v_1.AuxInt) 13540 x := v_2 13541 if !(c%7 == 0 && isPowerOfTwo64(c/7)) { 13542 break 13543 } 13544 v.reset(OpARM64ADDshiftLL) 13545 v.AuxInt = int64ToAuxInt(log64(c / 7)) 13546 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13547 v0.AuxInt = int64ToAuxInt(3) 13548 v0.AddArg2(x, x) 13549 v.AddArg2(a, v0) 13550 return true 13551 } 13552 // match: (MSUB a (MOVDconst [c]) x) 13553 // cond: c%9 == 0 && isPowerOfTwo64(c/9) 13554 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]) 13555 for { 13556 a := v_0 13557 if v_1.Op != OpARM64MOVDconst { 13558 break 13559 } 13560 c := auxIntToInt64(v_1.AuxInt) 13561 x := v_2 13562 if !(c%9 == 0 && isPowerOfTwo64(c/9)) { 13563 break 13564 } 13565 v.reset(OpARM64SUBshiftLL) 13566 v.AuxInt = int64ToAuxInt(log64(c / 9)) 13567 v0 := b.NewValue0(v.Pos, 
OpARM64ADDshiftLL, x.Type) 13568 v0.AuxInt = int64ToAuxInt(3) 13569 v0.AddArg2(x, x) 13570 v.AddArg2(a, v0) 13571 return true 13572 } 13573 // match: (MSUB (MOVDconst [c]) x y) 13574 // result: (ADDconst [c] (MNEG <x.Type> x y)) 13575 for { 13576 if v_0.Op != OpARM64MOVDconst { 13577 break 13578 } 13579 c := auxIntToInt64(v_0.AuxInt) 13580 x := v_1 13581 y := v_2 13582 v.reset(OpARM64ADDconst) 13583 v.AuxInt = int64ToAuxInt(c) 13584 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type) 13585 v0.AddArg2(x, y) 13586 v.AddArg(v0) 13587 return true 13588 } 13589 // match: (MSUB a (MOVDconst [c]) (MOVDconst [d])) 13590 // result: (SUBconst [c*d] a) 13591 for { 13592 a := v_0 13593 if v_1.Op != OpARM64MOVDconst { 13594 break 13595 } 13596 c := auxIntToInt64(v_1.AuxInt) 13597 if v_2.Op != OpARM64MOVDconst { 13598 break 13599 } 13600 d := auxIntToInt64(v_2.AuxInt) 13601 v.reset(OpARM64SUBconst) 13602 v.AuxInt = int64ToAuxInt(c * d) 13603 v.AddArg(a) 13604 return true 13605 } 13606 return false 13607 } 13608 func rewriteValueARM64_OpARM64MSUBW(v *Value) bool { 13609 v_2 := v.Args[2] 13610 v_1 := v.Args[1] 13611 v_0 := v.Args[0] 13612 b := v.Block 13613 // match: (MSUBW a x (MOVDconst [c])) 13614 // cond: int32(c)==-1 13615 // result: (MOVWUreg (ADD <a.Type> a x)) 13616 for { 13617 a := v_0 13618 x := v_1 13619 if v_2.Op != OpARM64MOVDconst { 13620 break 13621 } 13622 c := auxIntToInt64(v_2.AuxInt) 13623 if !(int32(c) == -1) { 13624 break 13625 } 13626 v.reset(OpARM64MOVWUreg) 13627 v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type) 13628 v0.AddArg2(a, x) 13629 v.AddArg(v0) 13630 return true 13631 } 13632 // match: (MSUBW a _ (MOVDconst [c])) 13633 // cond: int32(c)==0 13634 // result: (MOVWUreg a) 13635 for { 13636 a := v_0 13637 if v_2.Op != OpARM64MOVDconst { 13638 break 13639 } 13640 c := auxIntToInt64(v_2.AuxInt) 13641 if !(int32(c) == 0) { 13642 break 13643 } 13644 v.reset(OpARM64MOVWUreg) 13645 v.AddArg(a) 13646 return true 13647 } 13648 // match: (MSUBW a x (MOVDconst [c])) 13649 
// cond: int32(c)==1 13650 // result: (MOVWUreg (SUB <a.Type> a x)) 13651 for { 13652 a := v_0 13653 x := v_1 13654 if v_2.Op != OpARM64MOVDconst { 13655 break 13656 } 13657 c := auxIntToInt64(v_2.AuxInt) 13658 if !(int32(c) == 1) { 13659 break 13660 } 13661 v.reset(OpARM64MOVWUreg) 13662 v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type) 13663 v0.AddArg2(a, x) 13664 v.AddArg(v0) 13665 return true 13666 } 13667 // match: (MSUBW a x (MOVDconst [c])) 13668 // cond: isPowerOfTwo64(c) 13669 // result: (MOVWUreg (SUBshiftLL <a.Type> a x [log64(c)])) 13670 for { 13671 a := v_0 13672 x := v_1 13673 if v_2.Op != OpARM64MOVDconst { 13674 break 13675 } 13676 c := auxIntToInt64(v_2.AuxInt) 13677 if !(isPowerOfTwo64(c)) { 13678 break 13679 } 13680 v.reset(OpARM64MOVWUreg) 13681 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 13682 v0.AuxInt = int64ToAuxInt(log64(c)) 13683 v0.AddArg2(a, x) 13684 v.AddArg(v0) 13685 return true 13686 } 13687 // match: (MSUBW a x (MOVDconst [c])) 13688 // cond: isPowerOfTwo64(c-1) && int32(c)>=3 13689 // result: (MOVWUreg (SUB <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)]))) 13690 for { 13691 a := v_0 13692 x := v_1 13693 if v_2.Op != OpARM64MOVDconst { 13694 break 13695 } 13696 c := auxIntToInt64(v_2.AuxInt) 13697 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) { 13698 break 13699 } 13700 v.reset(OpARM64MOVWUreg) 13701 v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type) 13702 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13703 v1.AuxInt = int64ToAuxInt(log64(c - 1)) 13704 v1.AddArg2(x, x) 13705 v0.AddArg2(a, v1) 13706 v.AddArg(v0) 13707 return true 13708 } 13709 // match: (MSUBW a x (MOVDconst [c])) 13710 // cond: isPowerOfTwo64(c+1) && int32(c)>=7 13711 // result: (MOVWUreg (ADD <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)]))) 13712 for { 13713 a := v_0 13714 x := v_1 13715 if v_2.Op != OpARM64MOVDconst { 13716 break 13717 } 13718 c := auxIntToInt64(v_2.AuxInt) 13719 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) { 13720 break 13721 } 13722 
v.reset(OpARM64MOVWUreg) 13723 v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type) 13724 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13725 v1.AuxInt = int64ToAuxInt(log64(c + 1)) 13726 v1.AddArg2(x, x) 13727 v0.AddArg2(a, v1) 13728 v.AddArg(v0) 13729 return true 13730 } 13731 // match: (MSUBW a x (MOVDconst [c])) 13732 // cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c) 13733 // result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])) 13734 for { 13735 a := v_0 13736 x := v_1 13737 if v_2.Op != OpARM64MOVDconst { 13738 break 13739 } 13740 c := auxIntToInt64(v_2.AuxInt) 13741 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) { 13742 break 13743 } 13744 v.reset(OpARM64MOVWUreg) 13745 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type) 13746 v0.AuxInt = int64ToAuxInt(log64(c / 3)) 13747 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13748 v1.AuxInt = int64ToAuxInt(2) 13749 v1.AddArg2(x, x) 13750 v0.AddArg2(a, v1) 13751 v.AddArg(v0) 13752 return true 13753 } 13754 // match: (MSUBW a x (MOVDconst [c])) 13755 // cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c) 13756 // result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])) 13757 for { 13758 a := v_0 13759 x := v_1 13760 if v_2.Op != OpARM64MOVDconst { 13761 break 13762 } 13763 c := auxIntToInt64(v_2.AuxInt) 13764 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) { 13765 break 13766 } 13767 v.reset(OpARM64MOVWUreg) 13768 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 13769 v0.AuxInt = int64ToAuxInt(log64(c / 5)) 13770 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13771 v1.AuxInt = int64ToAuxInt(2) 13772 v1.AddArg2(x, x) 13773 v0.AddArg2(a, v1) 13774 v.AddArg(v0) 13775 return true 13776 } 13777 // match: (MSUBW a x (MOVDconst [c])) 13778 // cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c) 13779 // result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])) 13780 for { 13781 a := v_0 13782 x := v_1 13783 if 
v_2.Op != OpARM64MOVDconst { 13784 break 13785 } 13786 c := auxIntToInt64(v_2.AuxInt) 13787 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) { 13788 break 13789 } 13790 v.reset(OpARM64MOVWUreg) 13791 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type) 13792 v0.AuxInt = int64ToAuxInt(log64(c / 7)) 13793 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13794 v1.AuxInt = int64ToAuxInt(3) 13795 v1.AddArg2(x, x) 13796 v0.AddArg2(a, v1) 13797 v.AddArg(v0) 13798 return true 13799 } 13800 // match: (MSUBW a x (MOVDconst [c])) 13801 // cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c) 13802 // result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])) 13803 for { 13804 a := v_0 13805 x := v_1 13806 if v_2.Op != OpARM64MOVDconst { 13807 break 13808 } 13809 c := auxIntToInt64(v_2.AuxInt) 13810 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) { 13811 break 13812 } 13813 v.reset(OpARM64MOVWUreg) 13814 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 13815 v0.AuxInt = int64ToAuxInt(log64(c / 9)) 13816 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13817 v1.AuxInt = int64ToAuxInt(3) 13818 v1.AddArg2(x, x) 13819 v0.AddArg2(a, v1) 13820 v.AddArg(v0) 13821 return true 13822 } 13823 // match: (MSUBW a (MOVDconst [c]) x) 13824 // cond: int32(c)==-1 13825 // result: (MOVWUreg (ADD <a.Type> a x)) 13826 for { 13827 a := v_0 13828 if v_1.Op != OpARM64MOVDconst { 13829 break 13830 } 13831 c := auxIntToInt64(v_1.AuxInt) 13832 x := v_2 13833 if !(int32(c) == -1) { 13834 break 13835 } 13836 v.reset(OpARM64MOVWUreg) 13837 v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type) 13838 v0.AddArg2(a, x) 13839 v.AddArg(v0) 13840 return true 13841 } 13842 // match: (MSUBW a (MOVDconst [c]) _) 13843 // cond: int32(c)==0 13844 // result: (MOVWUreg a) 13845 for { 13846 a := v_0 13847 if v_1.Op != OpARM64MOVDconst { 13848 break 13849 } 13850 c := auxIntToInt64(v_1.AuxInt) 13851 if !(int32(c) == 0) { 13852 break 13853 } 13854 v.reset(OpARM64MOVWUreg) 13855 v.AddArg(a) 
13856 return true 13857 } 13858 // match: (MSUBW a (MOVDconst [c]) x) 13859 // cond: int32(c)==1 13860 // result: (MOVWUreg (SUB <a.Type> a x)) 13861 for { 13862 a := v_0 13863 if v_1.Op != OpARM64MOVDconst { 13864 break 13865 } 13866 c := auxIntToInt64(v_1.AuxInt) 13867 x := v_2 13868 if !(int32(c) == 1) { 13869 break 13870 } 13871 v.reset(OpARM64MOVWUreg) 13872 v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type) 13873 v0.AddArg2(a, x) 13874 v.AddArg(v0) 13875 return true 13876 } 13877 // match: (MSUBW a (MOVDconst [c]) x) 13878 // cond: isPowerOfTwo64(c) 13879 // result: (MOVWUreg (SUBshiftLL <a.Type> a x [log64(c)])) 13880 for { 13881 a := v_0 13882 if v_1.Op != OpARM64MOVDconst { 13883 break 13884 } 13885 c := auxIntToInt64(v_1.AuxInt) 13886 x := v_2 13887 if !(isPowerOfTwo64(c)) { 13888 break 13889 } 13890 v.reset(OpARM64MOVWUreg) 13891 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 13892 v0.AuxInt = int64ToAuxInt(log64(c)) 13893 v0.AddArg2(a, x) 13894 v.AddArg(v0) 13895 return true 13896 } 13897 // match: (MSUBW a (MOVDconst [c]) x) 13898 // cond: isPowerOfTwo64(c-1) && int32(c)>=3 13899 // result: (MOVWUreg (SUB <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)]))) 13900 for { 13901 a := v_0 13902 if v_1.Op != OpARM64MOVDconst { 13903 break 13904 } 13905 c := auxIntToInt64(v_1.AuxInt) 13906 x := v_2 13907 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) { 13908 break 13909 } 13910 v.reset(OpARM64MOVWUreg) 13911 v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type) 13912 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13913 v1.AuxInt = int64ToAuxInt(log64(c - 1)) 13914 v1.AddArg2(x, x) 13915 v0.AddArg2(a, v1) 13916 v.AddArg(v0) 13917 return true 13918 } 13919 // match: (MSUBW a (MOVDconst [c]) x) 13920 // cond: isPowerOfTwo64(c+1) && int32(c)>=7 13921 // result: (MOVWUreg (ADD <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)]))) 13922 for { 13923 a := v_0 13924 if v_1.Op != OpARM64MOVDconst { 13925 break 13926 } 13927 c := auxIntToInt64(v_1.AuxInt) 13928 x := v_2 13929 if 
!(isPowerOfTwo64(c+1) && int32(c) >= 7) { 13930 break 13931 } 13932 v.reset(OpARM64MOVWUreg) 13933 v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type) 13934 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13935 v1.AuxInt = int64ToAuxInt(log64(c + 1)) 13936 v1.AddArg2(x, x) 13937 v0.AddArg2(a, v1) 13938 v.AddArg(v0) 13939 return true 13940 } 13941 // match: (MSUBW a (MOVDconst [c]) x) 13942 // cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c) 13943 // result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])) 13944 for { 13945 a := v_0 13946 if v_1.Op != OpARM64MOVDconst { 13947 break 13948 } 13949 c := auxIntToInt64(v_1.AuxInt) 13950 x := v_2 13951 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) { 13952 break 13953 } 13954 v.reset(OpARM64MOVWUreg) 13955 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type) 13956 v0.AuxInt = int64ToAuxInt(log64(c / 3)) 13957 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 13958 v1.AuxInt = int64ToAuxInt(2) 13959 v1.AddArg2(x, x) 13960 v0.AddArg2(a, v1) 13961 v.AddArg(v0) 13962 return true 13963 } 13964 // match: (MSUBW a (MOVDconst [c]) x) 13965 // cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c) 13966 // result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])) 13967 for { 13968 a := v_0 13969 if v_1.Op != OpARM64MOVDconst { 13970 break 13971 } 13972 c := auxIntToInt64(v_1.AuxInt) 13973 x := v_2 13974 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) { 13975 break 13976 } 13977 v.reset(OpARM64MOVWUreg) 13978 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 13979 v0.AuxInt = int64ToAuxInt(log64(c / 5)) 13980 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 13981 v1.AuxInt = int64ToAuxInt(2) 13982 v1.AddArg2(x, x) 13983 v0.AddArg2(a, v1) 13984 v.AddArg(v0) 13985 return true 13986 } 13987 // match: (MSUBW a (MOVDconst [c]) x) 13988 // cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c) 13989 // result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) 
[log64(c/7)])) 13990 for { 13991 a := v_0 13992 if v_1.Op != OpARM64MOVDconst { 13993 break 13994 } 13995 c := auxIntToInt64(v_1.AuxInt) 13996 x := v_2 13997 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) { 13998 break 13999 } 14000 v.reset(OpARM64MOVWUreg) 14001 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type) 14002 v0.AuxInt = int64ToAuxInt(log64(c / 7)) 14003 v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 14004 v1.AuxInt = int64ToAuxInt(3) 14005 v1.AddArg2(x, x) 14006 v0.AddArg2(a, v1) 14007 v.AddArg(v0) 14008 return true 14009 } 14010 // match: (MSUBW a (MOVDconst [c]) x) 14011 // cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c) 14012 // result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])) 14013 for { 14014 a := v_0 14015 if v_1.Op != OpARM64MOVDconst { 14016 break 14017 } 14018 c := auxIntToInt64(v_1.AuxInt) 14019 x := v_2 14020 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) { 14021 break 14022 } 14023 v.reset(OpARM64MOVWUreg) 14024 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type) 14025 v0.AuxInt = int64ToAuxInt(log64(c / 9)) 14026 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14027 v1.AuxInt = int64ToAuxInt(3) 14028 v1.AddArg2(x, x) 14029 v0.AddArg2(a, v1) 14030 v.AddArg(v0) 14031 return true 14032 } 14033 // match: (MSUBW (MOVDconst [c]) x y) 14034 // result: (MOVWUreg (ADDconst <x.Type> [c] (MNEGW <x.Type> x y))) 14035 for { 14036 if v_0.Op != OpARM64MOVDconst { 14037 break 14038 } 14039 c := auxIntToInt64(v_0.AuxInt) 14040 x := v_1 14041 y := v_2 14042 v.reset(OpARM64MOVWUreg) 14043 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, x.Type) 14044 v0.AuxInt = int64ToAuxInt(c) 14045 v1 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type) 14046 v1.AddArg2(x, y) 14047 v0.AddArg(v1) 14048 v.AddArg(v0) 14049 return true 14050 } 14051 // match: (MSUBW a (MOVDconst [c]) (MOVDconst [d])) 14052 // result: (MOVWUreg (SUBconst <a.Type> [c*d] a)) 14053 for { 14054 a := v_0 14055 if v_1.Op != OpARM64MOVDconst { 14056 break 
14057 } 14058 c := auxIntToInt64(v_1.AuxInt) 14059 if v_2.Op != OpARM64MOVDconst { 14060 break 14061 } 14062 d := auxIntToInt64(v_2.AuxInt) 14063 v.reset(OpARM64MOVWUreg) 14064 v0 := b.NewValue0(v.Pos, OpARM64SUBconst, a.Type) 14065 v0.AuxInt = int64ToAuxInt(c * d) 14066 v0.AddArg(a) 14067 v.AddArg(v0) 14068 return true 14069 } 14070 return false 14071 } 14072 func rewriteValueARM64_OpARM64MUL(v *Value) bool { 14073 v_1 := v.Args[1] 14074 v_0 := v.Args[0] 14075 b := v.Block 14076 // match: (MUL (NEG x) y) 14077 // result: (MNEG x y) 14078 for { 14079 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14080 if v_0.Op != OpARM64NEG { 14081 continue 14082 } 14083 x := v_0.Args[0] 14084 y := v_1 14085 v.reset(OpARM64MNEG) 14086 v.AddArg2(x, y) 14087 return true 14088 } 14089 break 14090 } 14091 // match: (MUL x (MOVDconst [-1])) 14092 // result: (NEG x) 14093 for { 14094 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14095 x := v_0 14096 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 { 14097 continue 14098 } 14099 v.reset(OpARM64NEG) 14100 v.AddArg(x) 14101 return true 14102 } 14103 break 14104 } 14105 // match: (MUL _ (MOVDconst [0])) 14106 // result: (MOVDconst [0]) 14107 for { 14108 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14109 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 { 14110 continue 14111 } 14112 v.reset(OpARM64MOVDconst) 14113 v.AuxInt = int64ToAuxInt(0) 14114 return true 14115 } 14116 break 14117 } 14118 // match: (MUL x (MOVDconst [1])) 14119 // result: x 14120 for { 14121 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14122 x := v_0 14123 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 14124 continue 14125 } 14126 v.copyOf(x) 14127 return true 14128 } 14129 break 14130 } 14131 // match: (MUL x (MOVDconst [c])) 14132 // cond: isPowerOfTwo64(c) 14133 // result: (SLLconst [log64(c)] x) 14134 for { 14135 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 
{ 14136 x := v_0 14137 if v_1.Op != OpARM64MOVDconst { 14138 continue 14139 } 14140 c := auxIntToInt64(v_1.AuxInt) 14141 if !(isPowerOfTwo64(c)) { 14142 continue 14143 } 14144 v.reset(OpARM64SLLconst) 14145 v.AuxInt = int64ToAuxInt(log64(c)) 14146 v.AddArg(x) 14147 return true 14148 } 14149 break 14150 } 14151 // match: (MUL x (MOVDconst [c])) 14152 // cond: isPowerOfTwo64(c-1) && c >= 3 14153 // result: (ADDshiftLL x x [log64(c-1)]) 14154 for { 14155 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14156 x := v_0 14157 if v_1.Op != OpARM64MOVDconst { 14158 continue 14159 } 14160 c := auxIntToInt64(v_1.AuxInt) 14161 if !(isPowerOfTwo64(c-1) && c >= 3) { 14162 continue 14163 } 14164 v.reset(OpARM64ADDshiftLL) 14165 v.AuxInt = int64ToAuxInt(log64(c - 1)) 14166 v.AddArg2(x, x) 14167 return true 14168 } 14169 break 14170 } 14171 // match: (MUL x (MOVDconst [c])) 14172 // cond: isPowerOfTwo64(c+1) && c >= 7 14173 // result: (ADDshiftLL (NEG <x.Type> x) x [log64(c+1)]) 14174 for { 14175 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14176 x := v_0 14177 if v_1.Op != OpARM64MOVDconst { 14178 continue 14179 } 14180 c := auxIntToInt64(v_1.AuxInt) 14181 if !(isPowerOfTwo64(c+1) && c >= 7) { 14182 continue 14183 } 14184 v.reset(OpARM64ADDshiftLL) 14185 v.AuxInt = int64ToAuxInt(log64(c + 1)) 14186 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 14187 v0.AddArg(x) 14188 v.AddArg2(v0, x) 14189 return true 14190 } 14191 break 14192 } 14193 // match: (MUL x (MOVDconst [c])) 14194 // cond: c%3 == 0 && isPowerOfTwo64(c/3) 14195 // result: (SLLconst [log64(c/3)] (ADDshiftLL <x.Type> x x [1])) 14196 for { 14197 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14198 x := v_0 14199 if v_1.Op != OpARM64MOVDconst { 14200 continue 14201 } 14202 c := auxIntToInt64(v_1.AuxInt) 14203 if !(c%3 == 0 && isPowerOfTwo64(c/3)) { 14204 continue 14205 } 14206 v.reset(OpARM64SLLconst) 14207 v.AuxInt = int64ToAuxInt(log64(c / 3)) 14208 v0 := b.NewValue0(v.Pos, 
OpARM64ADDshiftLL, x.Type) 14209 v0.AuxInt = int64ToAuxInt(1) 14210 v0.AddArg2(x, x) 14211 v.AddArg(v0) 14212 return true 14213 } 14214 break 14215 } 14216 // match: (MUL x (MOVDconst [c])) 14217 // cond: c%5 == 0 && isPowerOfTwo64(c/5) 14218 // result: (SLLconst [log64(c/5)] (ADDshiftLL <x.Type> x x [2])) 14219 for { 14220 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14221 x := v_0 14222 if v_1.Op != OpARM64MOVDconst { 14223 continue 14224 } 14225 c := auxIntToInt64(v_1.AuxInt) 14226 if !(c%5 == 0 && isPowerOfTwo64(c/5)) { 14227 continue 14228 } 14229 v.reset(OpARM64SLLconst) 14230 v.AuxInt = int64ToAuxInt(log64(c / 5)) 14231 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14232 v0.AuxInt = int64ToAuxInt(2) 14233 v0.AddArg2(x, x) 14234 v.AddArg(v0) 14235 return true 14236 } 14237 break 14238 } 14239 // match: (MUL x (MOVDconst [c])) 14240 // cond: c%7 == 0 && isPowerOfTwo64(c/7) 14241 // result: (SLLconst [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 14242 for { 14243 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14244 x := v_0 14245 if v_1.Op != OpARM64MOVDconst { 14246 continue 14247 } 14248 c := auxIntToInt64(v_1.AuxInt) 14249 if !(c%7 == 0 && isPowerOfTwo64(c/7)) { 14250 continue 14251 } 14252 v.reset(OpARM64SLLconst) 14253 v.AuxInt = int64ToAuxInt(log64(c / 7)) 14254 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14255 v0.AuxInt = int64ToAuxInt(3) 14256 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 14257 v1.AddArg(x) 14258 v0.AddArg2(v1, x) 14259 v.AddArg(v0) 14260 return true 14261 } 14262 break 14263 } 14264 // match: (MUL x (MOVDconst [c])) 14265 // cond: c%9 == 0 && isPowerOfTwo64(c/9) 14266 // result: (SLLconst [log64(c/9)] (ADDshiftLL <x.Type> x x [3])) 14267 for { 14268 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14269 x := v_0 14270 if v_1.Op != OpARM64MOVDconst { 14271 continue 14272 } 14273 c := auxIntToInt64(v_1.AuxInt) 14274 if !(c%9 == 0 && isPowerOfTwo64(c/9)) { 14275 continue 14276 } 
14277 v.reset(OpARM64SLLconst) 14278 v.AuxInt = int64ToAuxInt(log64(c / 9)) 14279 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14280 v0.AuxInt = int64ToAuxInt(3) 14281 v0.AddArg2(x, x) 14282 v.AddArg(v0) 14283 return true 14284 } 14285 break 14286 } 14287 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 14288 // result: (MOVDconst [c*d]) 14289 for { 14290 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14291 if v_0.Op != OpARM64MOVDconst { 14292 continue 14293 } 14294 c := auxIntToInt64(v_0.AuxInt) 14295 if v_1.Op != OpARM64MOVDconst { 14296 continue 14297 } 14298 d := auxIntToInt64(v_1.AuxInt) 14299 v.reset(OpARM64MOVDconst) 14300 v.AuxInt = int64ToAuxInt(c * d) 14301 return true 14302 } 14303 break 14304 } 14305 return false 14306 } 14307 func rewriteValueARM64_OpARM64MULW(v *Value) bool { 14308 v_1 := v.Args[1] 14309 v_0 := v.Args[0] 14310 b := v.Block 14311 // match: (MULW (NEG x) y) 14312 // result: (MNEGW x y) 14313 for { 14314 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14315 if v_0.Op != OpARM64NEG { 14316 continue 14317 } 14318 x := v_0.Args[0] 14319 y := v_1 14320 v.reset(OpARM64MNEGW) 14321 v.AddArg2(x, y) 14322 return true 14323 } 14324 break 14325 } 14326 // match: (MULW x (MOVDconst [c])) 14327 // cond: int32(c)==-1 14328 // result: (MOVWUreg (NEG <x.Type> x)) 14329 for { 14330 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14331 x := v_0 14332 if v_1.Op != OpARM64MOVDconst { 14333 continue 14334 } 14335 c := auxIntToInt64(v_1.AuxInt) 14336 if !(int32(c) == -1) { 14337 continue 14338 } 14339 v.reset(OpARM64MOVWUreg) 14340 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 14341 v0.AddArg(x) 14342 v.AddArg(v0) 14343 return true 14344 } 14345 break 14346 } 14347 // match: (MULW _ (MOVDconst [c])) 14348 // cond: int32(c)==0 14349 // result: (MOVDconst [0]) 14350 for { 14351 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14352 if v_1.Op != OpARM64MOVDconst { 14353 continue 14354 } 14355 c := 
auxIntToInt64(v_1.AuxInt) 14356 if !(int32(c) == 0) { 14357 continue 14358 } 14359 v.reset(OpARM64MOVDconst) 14360 v.AuxInt = int64ToAuxInt(0) 14361 return true 14362 } 14363 break 14364 } 14365 // match: (MULW x (MOVDconst [c])) 14366 // cond: int32(c)==1 14367 // result: (MOVWUreg x) 14368 for { 14369 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14370 x := v_0 14371 if v_1.Op != OpARM64MOVDconst { 14372 continue 14373 } 14374 c := auxIntToInt64(v_1.AuxInt) 14375 if !(int32(c) == 1) { 14376 continue 14377 } 14378 v.reset(OpARM64MOVWUreg) 14379 v.AddArg(x) 14380 return true 14381 } 14382 break 14383 } 14384 // match: (MULW x (MOVDconst [c])) 14385 // cond: isPowerOfTwo64(c) 14386 // result: (MOVWUreg (SLLconst <x.Type> [log64(c)] x)) 14387 for { 14388 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14389 x := v_0 14390 if v_1.Op != OpARM64MOVDconst { 14391 continue 14392 } 14393 c := auxIntToInt64(v_1.AuxInt) 14394 if !(isPowerOfTwo64(c)) { 14395 continue 14396 } 14397 v.reset(OpARM64MOVWUreg) 14398 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 14399 v0.AuxInt = int64ToAuxInt(log64(c)) 14400 v0.AddArg(x) 14401 v.AddArg(v0) 14402 return true 14403 } 14404 break 14405 } 14406 // match: (MULW x (MOVDconst [c])) 14407 // cond: isPowerOfTwo64(c-1) && int32(c) >= 3 14408 // result: (MOVWUreg (ADDshiftLL <x.Type> x x [log64(c-1)])) 14409 for { 14410 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14411 x := v_0 14412 if v_1.Op != OpARM64MOVDconst { 14413 continue 14414 } 14415 c := auxIntToInt64(v_1.AuxInt) 14416 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) { 14417 continue 14418 } 14419 v.reset(OpARM64MOVWUreg) 14420 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14421 v0.AuxInt = int64ToAuxInt(log64(c - 1)) 14422 v0.AddArg2(x, x) 14423 v.AddArg(v0) 14424 return true 14425 } 14426 break 14427 } 14428 // match: (MULW x (MOVDconst [c])) 14429 // cond: isPowerOfTwo64(c+1) && int32(c) >= 7 14430 // result: (MOVWUreg (ADDshiftLL 
<x.Type> (NEG <x.Type> x) x [log64(c+1)])) 14431 for { 14432 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14433 x := v_0 14434 if v_1.Op != OpARM64MOVDconst { 14435 continue 14436 } 14437 c := auxIntToInt64(v_1.AuxInt) 14438 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) { 14439 continue 14440 } 14441 v.reset(OpARM64MOVWUreg) 14442 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14443 v0.AuxInt = int64ToAuxInt(log64(c + 1)) 14444 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 14445 v1.AddArg(x) 14446 v0.AddArg2(v1, x) 14447 v.AddArg(v0) 14448 return true 14449 } 14450 break 14451 } 14452 // match: (MULW x (MOVDconst [c])) 14453 // cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c) 14454 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/3)] (ADDshiftLL <x.Type> x x [1]))) 14455 for { 14456 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14457 x := v_0 14458 if v_1.Op != OpARM64MOVDconst { 14459 continue 14460 } 14461 c := auxIntToInt64(v_1.AuxInt) 14462 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) { 14463 continue 14464 } 14465 v.reset(OpARM64MOVWUreg) 14466 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 14467 v0.AuxInt = int64ToAuxInt(log64(c / 3)) 14468 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14469 v1.AuxInt = int64ToAuxInt(1) 14470 v1.AddArg2(x, x) 14471 v0.AddArg(v1) 14472 v.AddArg(v0) 14473 return true 14474 } 14475 break 14476 } 14477 // match: (MULW x (MOVDconst [c])) 14478 // cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c) 14479 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2]))) 14480 for { 14481 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14482 x := v_0 14483 if v_1.Op != OpARM64MOVDconst { 14484 continue 14485 } 14486 c := auxIntToInt64(v_1.AuxInt) 14487 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) { 14488 continue 14489 } 14490 v.reset(OpARM64MOVWUreg) 14491 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 14492 v0.AuxInt = int64ToAuxInt(log64(c / 5)) 
14493 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14494 v1.AuxInt = int64ToAuxInt(2) 14495 v1.AddArg2(x, x) 14496 v0.AddArg(v1) 14497 v.AddArg(v0) 14498 return true 14499 } 14500 break 14501 } 14502 // match: (MULW x (MOVDconst [c])) 14503 // cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c) 14504 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))) 14505 for { 14506 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14507 x := v_0 14508 if v_1.Op != OpARM64MOVDconst { 14509 continue 14510 } 14511 c := auxIntToInt64(v_1.AuxInt) 14512 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) { 14513 continue 14514 } 14515 v.reset(OpARM64MOVWUreg) 14516 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 14517 v0.AuxInt = int64ToAuxInt(log64(c / 7)) 14518 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14519 v1.AuxInt = int64ToAuxInt(3) 14520 v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 14521 v2.AddArg(x) 14522 v1.AddArg2(v2, x) 14523 v0.AddArg(v1) 14524 v.AddArg(v0) 14525 return true 14526 } 14527 break 14528 } 14529 // match: (MULW x (MOVDconst [c])) 14530 // cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c) 14531 // result: (MOVWUreg (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3]))) 14532 for { 14533 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14534 x := v_0 14535 if v_1.Op != OpARM64MOVDconst { 14536 continue 14537 } 14538 c := auxIntToInt64(v_1.AuxInt) 14539 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) { 14540 continue 14541 } 14542 v.reset(OpARM64MOVWUreg) 14543 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 14544 v0.AuxInt = int64ToAuxInt(log64(c / 9)) 14545 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 14546 v1.AuxInt = int64ToAuxInt(3) 14547 v1.AddArg2(x, x) 14548 v0.AddArg(v1) 14549 v.AddArg(v0) 14550 return true 14551 } 14552 break 14553 } 14554 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 14555 // result: (MOVDconst [int64(uint32(c*d))]) 14556 for 
{ 14557 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 14558 if v_0.Op != OpARM64MOVDconst { 14559 continue 14560 } 14561 c := auxIntToInt64(v_0.AuxInt) 14562 if v_1.Op != OpARM64MOVDconst { 14563 continue 14564 } 14565 d := auxIntToInt64(v_1.AuxInt) 14566 v.reset(OpARM64MOVDconst) 14567 v.AuxInt = int64ToAuxInt(int64(uint32(c * d))) 14568 return true 14569 } 14570 break 14571 } 14572 return false 14573 } 14574 func rewriteValueARM64_OpARM64MVN(v *Value) bool { 14575 v_0 := v.Args[0] 14576 // match: (MVN (XOR x y)) 14577 // result: (EON x y) 14578 for { 14579 if v_0.Op != OpARM64XOR { 14580 break 14581 } 14582 y := v_0.Args[1] 14583 x := v_0.Args[0] 14584 v.reset(OpARM64EON) 14585 v.AddArg2(x, y) 14586 return true 14587 } 14588 // match: (MVN (MOVDconst [c])) 14589 // result: (MOVDconst [^c]) 14590 for { 14591 if v_0.Op != OpARM64MOVDconst { 14592 break 14593 } 14594 c := auxIntToInt64(v_0.AuxInt) 14595 v.reset(OpARM64MOVDconst) 14596 v.AuxInt = int64ToAuxInt(^c) 14597 return true 14598 } 14599 // match: (MVN x:(SLLconst [c] y)) 14600 // cond: clobberIfDead(x) 14601 // result: (MVNshiftLL [c] y) 14602 for { 14603 x := v_0 14604 if x.Op != OpARM64SLLconst { 14605 break 14606 } 14607 c := auxIntToInt64(x.AuxInt) 14608 y := x.Args[0] 14609 if !(clobberIfDead(x)) { 14610 break 14611 } 14612 v.reset(OpARM64MVNshiftLL) 14613 v.AuxInt = int64ToAuxInt(c) 14614 v.AddArg(y) 14615 return true 14616 } 14617 // match: (MVN x:(SRLconst [c] y)) 14618 // cond: clobberIfDead(x) 14619 // result: (MVNshiftRL [c] y) 14620 for { 14621 x := v_0 14622 if x.Op != OpARM64SRLconst { 14623 break 14624 } 14625 c := auxIntToInt64(x.AuxInt) 14626 y := x.Args[0] 14627 if !(clobberIfDead(x)) { 14628 break 14629 } 14630 v.reset(OpARM64MVNshiftRL) 14631 v.AuxInt = int64ToAuxInt(c) 14632 v.AddArg(y) 14633 return true 14634 } 14635 // match: (MVN x:(SRAconst [c] y)) 14636 // cond: clobberIfDead(x) 14637 // result: (MVNshiftRA [c] y) 14638 for { 14639 x := v_0 14640 if x.Op != 
OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(RORconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRO [c] y)
	for {
		x := v_0
		if x.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		// NOTE(review): clobberIfDead presumably only succeeds when the shift
		// value x can be discarded (single use) — confirm in rewrite helpers.
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftLL constant-folds MVNshiftLL: when the
// shifted operand is a MOVDconst, the whole value collapses to the constant
// ^(uint64(c) << d). Code generated from _gen/ARM64.rules; edit the rules
// file, not this function.
func rewriteValueARM64_OpARM64MVNshiftLL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		// Shift performed in uint64 to match the hardware logical shift.
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftRA constant-folds MVNshiftRA: bitwise NOT
// of an arithmetic (sign-propagating, signed c >> d) right shift of a constant.
func rewriteValueARM64_OpARM64MVNshiftRA(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		// c is int64, so >> here is the arithmetic shift.
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftRL constant-folds MVNshiftRL: bitwise NOT
// of a logical (zero-filling, uint64(c) >> d) right shift of a constant.
func rewriteValueARM64_OpARM64MVNshiftRL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftRO constant-folds MVNshiftRO: bitwise NOT
// of a rotate-right of a constant, evaluated via rotateRight64.
func rewriteValueARM64_OpARM64MVNshiftRO(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRO (MOVDconst [c]) [d])
	// result: (MOVDconst [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEG rewrites NEG: it folds negation into
// multiplies (MNEG, and MNEGW when the result type is at most 4 bytes),
// cancels double negation, negates constants, and absorbs constant shifts
// into the combined NEGshift* ops when clobberIfDead permits.
func rewriteValueARM64_OpARM64NEG(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEG (MUL x y))
	// result: (MNEG x y)
	for {
		if v_0.Op != OpARM64MUL {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEG)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (MULW x y))
	// cond: v.Type.Size() <= 4
	// result: (MNEGW x y)
	for {
		if v_0.Op != OpARM64MULW {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		// MNEGW only preserves the low 32 bits, so require a narrow result.
		if !(v.Type.Size() <= 4) {
			break
		}
		v.reset(OpARM64MNEGW)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [c]))
	// result: (MOVDconst [-c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	// match: (NEG x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftLL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x))
{
			break
		}
		v.reset(OpARM64NEGshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRA [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		// NOTE(review): clobberIfDead presumably requires the shift to be
		// otherwise dead (single use) — confirm in rewrite helpers.
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEGshiftLL constant-folds NEGshiftLL: negation of
// a left-shifted constant becomes a single MOVDconst of -(uint64(c) << d).
// Code generated from _gen/ARM64.rules; edit the rules file, not this function.
func rewriteValueARM64_OpARM64NEGshiftLL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-int64(uint64(c) << uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEGshiftRA constant-folds NEGshiftRA: negation of
// an arithmetic (signed c >> d) right shift of a constant.
func rewriteValueARM64_OpARM64NEGshiftRA(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [-(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		// c is int64, so >> here is the arithmetic shift.
		v.AuxInt = int64ToAuxInt(-(c >> uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEGshiftRL constant-folds NEGshiftRL: negation of
// a logical (uint64(c) >> d) right shift of a constant.
func rewriteValueARM64_OpARM64NEGshiftRL(v *Value) bool {
	v_0
:= v.Args[0]
	// match: (NEGshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-int64(uint64(c) >> uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NotEqual rewrites NotEqual over comparisons:
// single-use compare-with-zero of AND/ANDconst becomes TST/TSTconst forms,
// of ADD/ADDconst/NEG becomes CMN forms, and of MADD/MSUB (and the W
// variants) becomes CMN/CMP against an explicit MUL; it also folds
// FlagConstant and InvertFlags inputs.
func rewriteValueARM64_OpARM64NotEqual(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NotEqual (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		// Only fold when the AND has no other users, since TST discards
		// the AND's data result and produces flags only.
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (NotEqual (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y :=
z.Args[1] 14952 x := z.Args[0] 14953 if !(z.Uses == 1) { 14954 break 14955 } 14956 v.reset(OpARM64NotEqual) 14957 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 14958 v0.AddArg2(x, y) 14959 v.AddArg(v0) 14960 return true 14961 } 14962 // match: (NotEqual (CMPconst [0] x:(ANDconst [c] y))) 14963 // cond: x.Uses == 1 14964 // result: (NotEqual (TSTconst [c] y)) 14965 for { 14966 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 14967 break 14968 } 14969 x := v_0.Args[0] 14970 if x.Op != OpARM64ANDconst { 14971 break 14972 } 14973 c := auxIntToInt64(x.AuxInt) 14974 y := x.Args[0] 14975 if !(x.Uses == 1) { 14976 break 14977 } 14978 v.reset(OpARM64NotEqual) 14979 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 14980 v0.AuxInt = int64ToAuxInt(c) 14981 v0.AddArg(y) 14982 v.AddArg(v0) 14983 return true 14984 } 14985 // match: (NotEqual (CMP x z:(NEG y))) 14986 // cond: z.Uses == 1 14987 // result: (NotEqual (CMN x y)) 14988 for { 14989 if v_0.Op != OpARM64CMP { 14990 break 14991 } 14992 _ = v_0.Args[1] 14993 x := v_0.Args[0] 14994 z := v_0.Args[1] 14995 if z.Op != OpARM64NEG { 14996 break 14997 } 14998 y := z.Args[0] 14999 if !(z.Uses == 1) { 15000 break 15001 } 15002 v.reset(OpARM64NotEqual) 15003 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 15004 v0.AddArg2(x, y) 15005 v.AddArg(v0) 15006 return true 15007 } 15008 // match: (NotEqual (CMPW x z:(NEG y))) 15009 // cond: z.Uses == 1 15010 // result: (NotEqual (CMNW x y)) 15011 for { 15012 if v_0.Op != OpARM64CMPW { 15013 break 15014 } 15015 _ = v_0.Args[1] 15016 x := v_0.Args[0] 15017 z := v_0.Args[1] 15018 if z.Op != OpARM64NEG { 15019 break 15020 } 15021 y := z.Args[0] 15022 if !(z.Uses == 1) { 15023 break 15024 } 15025 v.reset(OpARM64NotEqual) 15026 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 15027 v0.AddArg2(x, y) 15028 v.AddArg(v0) 15029 return true 15030 } 15031 // match: (NotEqual (CMPconst [0] x:(ADDconst [c] y))) 15032 // cond: x.Uses == 1 15033 // result: 
(NotEqual (CMNconst [c] y)) 15034 for { 15035 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 15036 break 15037 } 15038 x := v_0.Args[0] 15039 if x.Op != OpARM64ADDconst { 15040 break 15041 } 15042 c := auxIntToInt64(x.AuxInt) 15043 y := x.Args[0] 15044 if !(x.Uses == 1) { 15045 break 15046 } 15047 v.reset(OpARM64NotEqual) 15048 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 15049 v0.AuxInt = int64ToAuxInt(c) 15050 v0.AddArg(y) 15051 v.AddArg(v0) 15052 return true 15053 } 15054 // match: (NotEqual (CMPWconst [0] x:(ADDconst [c] y))) 15055 // cond: x.Uses == 1 15056 // result: (NotEqual (CMNWconst [int32(c)] y)) 15057 for { 15058 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 15059 break 15060 } 15061 x := v_0.Args[0] 15062 if x.Op != OpARM64ADDconst { 15063 break 15064 } 15065 c := auxIntToInt64(x.AuxInt) 15066 y := x.Args[0] 15067 if !(x.Uses == 1) { 15068 break 15069 } 15070 v.reset(OpARM64NotEqual) 15071 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 15072 v0.AuxInt = int32ToAuxInt(int32(c)) 15073 v0.AddArg(y) 15074 v.AddArg(v0) 15075 return true 15076 } 15077 // match: (NotEqual (CMPconst [0] z:(ADD x y))) 15078 // cond: z.Uses == 1 15079 // result: (NotEqual (CMN x y)) 15080 for { 15081 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 15082 break 15083 } 15084 z := v_0.Args[0] 15085 if z.Op != OpARM64ADD { 15086 break 15087 } 15088 y := z.Args[1] 15089 x := z.Args[0] 15090 if !(z.Uses == 1) { 15091 break 15092 } 15093 v.reset(OpARM64NotEqual) 15094 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 15095 v0.AddArg2(x, y) 15096 v.AddArg(v0) 15097 return true 15098 } 15099 // match: (NotEqual (CMPWconst [0] z:(ADD x y))) 15100 // cond: z.Uses == 1 15101 // result: (NotEqual (CMNW x y)) 15102 for { 15103 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 15104 break 15105 } 15106 z := v_0.Args[0] 15107 if z.Op != OpARM64ADD { 15108 break 15109 } 15110 y := z.Args[1] 
15111 x := z.Args[0] 15112 if !(z.Uses == 1) { 15113 break 15114 } 15115 v.reset(OpARM64NotEqual) 15116 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 15117 v0.AddArg2(x, y) 15118 v.AddArg(v0) 15119 return true 15120 } 15121 // match: (NotEqual (CMPconst [0] z:(MADD a x y))) 15122 // cond: z.Uses == 1 15123 // result: (NotEqual (CMN a (MUL <x.Type> x y))) 15124 for { 15125 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 15126 break 15127 } 15128 z := v_0.Args[0] 15129 if z.Op != OpARM64MADD { 15130 break 15131 } 15132 y := z.Args[2] 15133 a := z.Args[0] 15134 x := z.Args[1] 15135 if !(z.Uses == 1) { 15136 break 15137 } 15138 v.reset(OpARM64NotEqual) 15139 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 15140 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 15141 v1.AddArg2(x, y) 15142 v0.AddArg2(a, v1) 15143 v.AddArg(v0) 15144 return true 15145 } 15146 // match: (NotEqual (CMPconst [0] z:(MSUB a x y))) 15147 // cond: z.Uses == 1 15148 // result: (NotEqual (CMP a (MUL <x.Type> x y))) 15149 for { 15150 if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { 15151 break 15152 } 15153 z := v_0.Args[0] 15154 if z.Op != OpARM64MSUB { 15155 break 15156 } 15157 y := z.Args[2] 15158 a := z.Args[0] 15159 x := z.Args[1] 15160 if !(z.Uses == 1) { 15161 break 15162 } 15163 v.reset(OpARM64NotEqual) 15164 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 15165 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 15166 v1.AddArg2(x, y) 15167 v0.AddArg2(a, v1) 15168 v.AddArg(v0) 15169 return true 15170 } 15171 // match: (NotEqual (CMPWconst [0] z:(MADDW a x y))) 15172 // cond: z.Uses == 1 15173 // result: (NotEqual (CMNW a (MULW <x.Type> x y))) 15174 for { 15175 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 15176 break 15177 } 15178 z := v_0.Args[0] 15179 if z.Op != OpARM64MADDW { 15180 break 15181 } 15182 y := z.Args[2] 15183 a := z.Args[0] 15184 x := z.Args[1] 15185 if !(z.Uses == 1) { 15186 break 15187 } 15188 
v.reset(OpARM64NotEqual) 15189 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 15190 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 15191 v1.AddArg2(x, y) 15192 v0.AddArg2(a, v1) 15193 v.AddArg(v0) 15194 return true 15195 } 15196 // match: (NotEqual (CMPWconst [0] z:(MSUBW a x y))) 15197 // cond: z.Uses == 1 15198 // result: (NotEqual (CMPW a (MULW <x.Type> x y))) 15199 for { 15200 if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 { 15201 break 15202 } 15203 z := v_0.Args[0] 15204 if z.Op != OpARM64MSUBW { 15205 break 15206 } 15207 y := z.Args[2] 15208 a := z.Args[0] 15209 x := z.Args[1] 15210 if !(z.Uses == 1) { 15211 break 15212 } 15213 v.reset(OpARM64NotEqual) 15214 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 15215 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 15216 v1.AddArg2(x, y) 15217 v0.AddArg2(a, v1) 15218 v.AddArg(v0) 15219 return true 15220 } 15221 // match: (NotEqual (FlagConstant [fc])) 15222 // result: (MOVDconst [b2i(fc.ne())]) 15223 for { 15224 if v_0.Op != OpARM64FlagConstant { 15225 break 15226 } 15227 fc := auxIntToFlagConstant(v_0.AuxInt) 15228 v.reset(OpARM64MOVDconst) 15229 v.AuxInt = int64ToAuxInt(b2i(fc.ne())) 15230 return true 15231 } 15232 // match: (NotEqual (InvertFlags x)) 15233 // result: (NotEqual x) 15234 for { 15235 if v_0.Op != OpARM64InvertFlags { 15236 break 15237 } 15238 x := v_0.Args[0] 15239 v.reset(OpARM64NotEqual) 15240 v.AddArg(x) 15241 return true 15242 } 15243 return false 15244 } 15245 func rewriteValueARM64_OpARM64OR(v *Value) bool { 15246 v_1 := v.Args[1] 15247 v_0 := v.Args[0] 15248 // match: (OR x (MOVDconst [c])) 15249 // result: (ORconst [c] x) 15250 for { 15251 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15252 x := v_0 15253 if v_1.Op != OpARM64MOVDconst { 15254 continue 15255 } 15256 c := auxIntToInt64(v_1.AuxInt) 15257 v.reset(OpARM64ORconst) 15258 v.AuxInt = int64ToAuxInt(c) 15259 v.AddArg(x) 15260 return true 15261 } 15262 break 15263 } 15264 // match: (OR x x) 
15265 // result: x 15266 for { 15267 x := v_0 15268 if x != v_1 { 15269 break 15270 } 15271 v.copyOf(x) 15272 return true 15273 } 15274 // match: (OR x (MVN y)) 15275 // result: (ORN x y) 15276 for { 15277 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15278 x := v_0 15279 if v_1.Op != OpARM64MVN { 15280 continue 15281 } 15282 y := v_1.Args[0] 15283 v.reset(OpARM64ORN) 15284 v.AddArg2(x, y) 15285 return true 15286 } 15287 break 15288 } 15289 // match: (OR x0 x1:(SLLconst [c] y)) 15290 // cond: clobberIfDead(x1) 15291 // result: (ORshiftLL x0 y [c]) 15292 for { 15293 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15294 x0 := v_0 15295 x1 := v_1 15296 if x1.Op != OpARM64SLLconst { 15297 continue 15298 } 15299 c := auxIntToInt64(x1.AuxInt) 15300 y := x1.Args[0] 15301 if !(clobberIfDead(x1)) { 15302 continue 15303 } 15304 v.reset(OpARM64ORshiftLL) 15305 v.AuxInt = int64ToAuxInt(c) 15306 v.AddArg2(x0, y) 15307 return true 15308 } 15309 break 15310 } 15311 // match: (OR x0 x1:(SRLconst [c] y)) 15312 // cond: clobberIfDead(x1) 15313 // result: (ORshiftRL x0 y [c]) 15314 for { 15315 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15316 x0 := v_0 15317 x1 := v_1 15318 if x1.Op != OpARM64SRLconst { 15319 continue 15320 } 15321 c := auxIntToInt64(x1.AuxInt) 15322 y := x1.Args[0] 15323 if !(clobberIfDead(x1)) { 15324 continue 15325 } 15326 v.reset(OpARM64ORshiftRL) 15327 v.AuxInt = int64ToAuxInt(c) 15328 v.AddArg2(x0, y) 15329 return true 15330 } 15331 break 15332 } 15333 // match: (OR x0 x1:(SRAconst [c] y)) 15334 // cond: clobberIfDead(x1) 15335 // result: (ORshiftRA x0 y [c]) 15336 for { 15337 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15338 x0 := v_0 15339 x1 := v_1 15340 if x1.Op != OpARM64SRAconst { 15341 continue 15342 } 15343 c := auxIntToInt64(x1.AuxInt) 15344 y := x1.Args[0] 15345 if !(clobberIfDead(x1)) { 15346 continue 15347 } 15348 v.reset(OpARM64ORshiftRA) 15349 v.AuxInt = int64ToAuxInt(c) 15350 v.AddArg2(x0, y) 
15351 return true 15352 } 15353 break 15354 } 15355 // match: (OR x0 x1:(RORconst [c] y)) 15356 // cond: clobberIfDead(x1) 15357 // result: (ORshiftRO x0 y [c]) 15358 for { 15359 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15360 x0 := v_0 15361 x1 := v_1 15362 if x1.Op != OpARM64RORconst { 15363 continue 15364 } 15365 c := auxIntToInt64(x1.AuxInt) 15366 y := x1.Args[0] 15367 if !(clobberIfDead(x1)) { 15368 continue 15369 } 15370 v.reset(OpARM64ORshiftRO) 15371 v.AuxInt = int64ToAuxInt(c) 15372 v.AddArg2(x0, y) 15373 return true 15374 } 15375 break 15376 } 15377 // match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y)) 15378 // cond: ac == ^((1<<uint(bfc.getARM64BFwidth())-1) << uint(bfc.getARM64BFlsb())) 15379 // result: (BFI [bfc] y x) 15380 for { 15381 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15382 if v_0.Op != OpARM64UBFIZ { 15383 continue 15384 } 15385 bfc := auxIntToArm64BitField(v_0.AuxInt) 15386 x := v_0.Args[0] 15387 if v_1.Op != OpARM64ANDconst { 15388 continue 15389 } 15390 ac := auxIntToInt64(v_1.AuxInt) 15391 y := v_1.Args[0] 15392 if !(ac == ^((1<<uint(bfc.getARM64BFwidth()) - 1) << uint(bfc.getARM64BFlsb()))) { 15393 continue 15394 } 15395 v.reset(OpARM64BFI) 15396 v.AuxInt = arm64BitFieldToAuxInt(bfc) 15397 v.AddArg2(y, x) 15398 return true 15399 } 15400 break 15401 } 15402 // match: (OR (UBFX [bfc] x) (ANDconst [ac] y)) 15403 // cond: ac == ^(1<<uint(bfc.getARM64BFwidth())-1) 15404 // result: (BFXIL [bfc] y x) 15405 for { 15406 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 15407 if v_0.Op != OpARM64UBFX { 15408 continue 15409 } 15410 bfc := auxIntToArm64BitField(v_0.AuxInt) 15411 x := v_0.Args[0] 15412 if v_1.Op != OpARM64ANDconst { 15413 continue 15414 } 15415 ac := auxIntToInt64(v_1.AuxInt) 15416 y := v_1.Args[0] 15417 if !(ac == ^(1<<uint(bfc.getARM64BFwidth()) - 1)) { 15418 continue 15419 } 15420 v.reset(OpARM64BFXIL) 15421 v.AuxInt = arm64BitFieldToAuxInt(bfc) 15422 v.AddArg2(y, x) 15423 return true 15424 } 
15425 break 15426 } 15427 return false 15428 } 15429 func rewriteValueARM64_OpARM64ORN(v *Value) bool { 15430 v_1 := v.Args[1] 15431 v_0 := v.Args[0] 15432 // match: (ORN x (MOVDconst [c])) 15433 // result: (ORconst [^c] x) 15434 for { 15435 x := v_0 15436 if v_1.Op != OpARM64MOVDconst { 15437 break 15438 } 15439 c := auxIntToInt64(v_1.AuxInt) 15440 v.reset(OpARM64ORconst) 15441 v.AuxInt = int64ToAuxInt(^c) 15442 v.AddArg(x) 15443 return true 15444 } 15445 // match: (ORN x x) 15446 // result: (MOVDconst [-1]) 15447 for { 15448 x := v_0 15449 if x != v_1 { 15450 break 15451 } 15452 v.reset(OpARM64MOVDconst) 15453 v.AuxInt = int64ToAuxInt(-1) 15454 return true 15455 } 15456 // match: (ORN x0 x1:(SLLconst [c] y)) 15457 // cond: clobberIfDead(x1) 15458 // result: (ORNshiftLL x0 y [c]) 15459 for { 15460 x0 := v_0 15461 x1 := v_1 15462 if x1.Op != OpARM64SLLconst { 15463 break 15464 } 15465 c := auxIntToInt64(x1.AuxInt) 15466 y := x1.Args[0] 15467 if !(clobberIfDead(x1)) { 15468 break 15469 } 15470 v.reset(OpARM64ORNshiftLL) 15471 v.AuxInt = int64ToAuxInt(c) 15472 v.AddArg2(x0, y) 15473 return true 15474 } 15475 // match: (ORN x0 x1:(SRLconst [c] y)) 15476 // cond: clobberIfDead(x1) 15477 // result: (ORNshiftRL x0 y [c]) 15478 for { 15479 x0 := v_0 15480 x1 := v_1 15481 if x1.Op != OpARM64SRLconst { 15482 break 15483 } 15484 c := auxIntToInt64(x1.AuxInt) 15485 y := x1.Args[0] 15486 if !(clobberIfDead(x1)) { 15487 break 15488 } 15489 v.reset(OpARM64ORNshiftRL) 15490 v.AuxInt = int64ToAuxInt(c) 15491 v.AddArg2(x0, y) 15492 return true 15493 } 15494 // match: (ORN x0 x1:(SRAconst [c] y)) 15495 // cond: clobberIfDead(x1) 15496 // result: (ORNshiftRA x0 y [c]) 15497 for { 15498 x0 := v_0 15499 x1 := v_1 15500 if x1.Op != OpARM64SRAconst { 15501 break 15502 } 15503 c := auxIntToInt64(x1.AuxInt) 15504 y := x1.Args[0] 15505 if !(clobberIfDead(x1)) { 15506 break 15507 } 15508 v.reset(OpARM64ORNshiftRA) 15509 v.AuxInt = int64ToAuxInt(c) 15510 v.AddArg2(x0, y) 15511 return true 
	}
	// match: (ORN x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftLL rewrites ORNshiftLL values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
// Each for-block below is one rule: it matches v's operands against the
// pattern in the // match: comment, and on success rewrites v in place
// and returns true. Rules are tried in order; the first match wins.
func rewriteValueARM64_OpARM64ORNshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftLL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		// Fold the constant shift into the OR-NOT at compile time.
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// x ORN x == all ones, regardless of the (equal) shifts.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftRA rewrites ORNshiftRA values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64ORNshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRA x (MOVDconst [c]) [d])
	// result: (ORconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		// Arithmetic (sign-extending) right shift of the constant.
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c :=
			auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// x ORN x == all ones, regardless of the (equal) shifts.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftRL rewrites ORNshiftRL values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
// Each for-block is one rule; the first matching rule rewrites v in
// place and returns true.
func rewriteValueARM64_OpARM64ORNshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		// Logical (zero-filling) right shift of the constant.
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftRO rewrites ORNshiftRO values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64ORNshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRO x (MOVDconst [c]) [d])
	// result: (ORconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// x ORN x == all ones, regardless of the (equal) rotates.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORconst rewrites ORconst values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
// Each for-block is one rule; the first matching rule rewrites v in
// place and returns true.
func rewriteValueARM64_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		// OR with zero is the identity.
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		// OR with all ones absorbs the other operand.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		// Collapse two constant ORs into one.
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	// match: (ORconst [c1] (ANDconst [c2] x))
	// cond: c2|c1 == ^0
	// result: (ORconst [c1] x)
	for {
		c1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c2|c1 == ^0) {
			break
		}
		// The AND mask only clears bits that the OR sets anyway,
		// so the AND is redundant.
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c1)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORshiftLL rewrites ORshiftLL values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64ORshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ORshiftLL (MOVDconst [c]) x [d])
	// result: (ORconst
[c] (SLLconst <x.Type> x [d])) 15745 for { 15746 d := auxIntToInt64(v.AuxInt) 15747 if v_0.Op != OpARM64MOVDconst { 15748 break 15749 } 15750 c := auxIntToInt64(v_0.AuxInt) 15751 x := v_1 15752 v.reset(OpARM64ORconst) 15753 v.AuxInt = int64ToAuxInt(c) 15754 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 15755 v0.AuxInt = int64ToAuxInt(d) 15756 v0.AddArg(x) 15757 v.AddArg(v0) 15758 return true 15759 } 15760 // match: (ORshiftLL x (MOVDconst [c]) [d]) 15761 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 15762 for { 15763 d := auxIntToInt64(v.AuxInt) 15764 x := v_0 15765 if v_1.Op != OpARM64MOVDconst { 15766 break 15767 } 15768 c := auxIntToInt64(v_1.AuxInt) 15769 v.reset(OpARM64ORconst) 15770 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 15771 v.AddArg(x) 15772 return true 15773 } 15774 // match: (ORshiftLL y:(SLLconst x [c]) x [c]) 15775 // result: y 15776 for { 15777 c := auxIntToInt64(v.AuxInt) 15778 y := v_0 15779 if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c { 15780 break 15781 } 15782 x := y.Args[0] 15783 if x != v_1 { 15784 break 15785 } 15786 v.copyOf(y) 15787 return true 15788 } 15789 // match: (ORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x) 15790 // result: (REV16W x) 15791 for { 15792 if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) { 15793 break 15794 } 15795 x := v_0.Args[0] 15796 if x != v_1 { 15797 break 15798 } 15799 v.reset(OpARM64REV16W) 15800 v.AddArg(x) 15801 return true 15802 } 15803 // match: (ORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x)) 15804 // cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff 15805 // result: (REV16W x) 15806 for { 15807 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) { 15808 break 15809 } 15810 v_0_0 := v_0.Args[0] 15811 if v_0_0.Op != 
OpARM64ANDconst { 15812 break 15813 } 15814 c1 := auxIntToInt64(v_0_0.AuxInt) 15815 x := v_0_0.Args[0] 15816 if v_1.Op != OpARM64ANDconst { 15817 break 15818 } 15819 c2 := auxIntToInt64(v_1.AuxInt) 15820 if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) { 15821 break 15822 } 15823 v.reset(OpARM64REV16W) 15824 v.AddArg(x) 15825 return true 15826 } 15827 // match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x)) 15828 // cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) 15829 // result: (REV16 x) 15830 for { 15831 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 { 15832 break 15833 } 15834 v_0_0 := v_0.Args[0] 15835 if v_0_0.Op != OpARM64ANDconst { 15836 break 15837 } 15838 c1 := auxIntToInt64(v_0_0.AuxInt) 15839 x := v_0_0.Args[0] 15840 if v_1.Op != OpARM64ANDconst { 15841 break 15842 } 15843 c2 := auxIntToInt64(v_1.AuxInt) 15844 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) { 15845 break 15846 } 15847 v.reset(OpARM64REV16) 15848 v.AddArg(x) 15849 return true 15850 } 15851 // match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x)) 15852 // cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) 15853 // result: (REV16 (ANDconst <x.Type> [0xffffffff] x)) 15854 for { 15855 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 { 15856 break 15857 } 15858 v_0_0 := v_0.Args[0] 15859 if v_0_0.Op != OpARM64ANDconst { 15860 break 15861 } 15862 c1 := auxIntToInt64(v_0_0.AuxInt) 15863 x := v_0_0.Args[0] 15864 if v_1.Op != OpARM64ANDconst { 15865 break 15866 } 15867 c2 := auxIntToInt64(v_1.AuxInt) 15868 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) { 15869 break 15870 } 15871 v.reset(OpARM64REV16) 15872 v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type) 15873 v0.AuxInt = int64ToAuxInt(0xffffffff) 15874 v0.AddArg(x) 
15875 v.AddArg(v0) 15876 return true 15877 } 15878 // match: ( ORshiftLL [c] (SRLconst x [64-c]) x2) 15879 // result: (EXTRconst [64-c] x2 x) 15880 for { 15881 c := auxIntToInt64(v.AuxInt) 15882 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c { 15883 break 15884 } 15885 x := v_0.Args[0] 15886 x2 := v_1 15887 v.reset(OpARM64EXTRconst) 15888 v.AuxInt = int64ToAuxInt(64 - c) 15889 v.AddArg2(x2, x) 15890 return true 15891 } 15892 // match: ( ORshiftLL <t> [c] (UBFX [bfc] x) x2) 15893 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 15894 // result: (EXTRWconst [32-c] x2 x) 15895 for { 15896 t := v.Type 15897 c := auxIntToInt64(v.AuxInt) 15898 if v_0.Op != OpARM64UBFX { 15899 break 15900 } 15901 bfc := auxIntToArm64BitField(v_0.AuxInt) 15902 x := v_0.Args[0] 15903 x2 := v_1 15904 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 15905 break 15906 } 15907 v.reset(OpARM64EXTRWconst) 15908 v.AuxInt = int64ToAuxInt(32 - c) 15909 v.AddArg2(x2, x) 15910 return true 15911 } 15912 // match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y)) 15913 // cond: sc == bfc.getARM64BFwidth() 15914 // result: (BFXIL [bfc] y x) 15915 for { 15916 sc := auxIntToInt64(v.AuxInt) 15917 if v_0.Op != OpARM64UBFX { 15918 break 15919 } 15920 bfc := auxIntToArm64BitField(v_0.AuxInt) 15921 x := v_0.Args[0] 15922 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != sc { 15923 break 15924 } 15925 y := v_1.Args[0] 15926 if !(sc == bfc.getARM64BFwidth()) { 15927 break 15928 } 15929 v.reset(OpARM64BFXIL) 15930 v.AuxInt = arm64BitFieldToAuxInt(bfc) 15931 v.AddArg2(y, x) 15932 return true 15933 } 15934 return false 15935 } 15936 func rewriteValueARM64_OpARM64ORshiftRA(v *Value) bool { 15937 v_1 := v.Args[1] 15938 v_0 := v.Args[0] 15939 b := v.Block 15940 // match: (ORshiftRA (MOVDconst [c]) x [d]) 15941 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 15942 for { 15943 d := auxIntToInt64(v.AuxInt) 15944 if v_0.Op != OpARM64MOVDconst { 15945 break 15946 
} 15947 c := auxIntToInt64(v_0.AuxInt) 15948 x := v_1 15949 v.reset(OpARM64ORconst) 15950 v.AuxInt = int64ToAuxInt(c) 15951 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 15952 v0.AuxInt = int64ToAuxInt(d) 15953 v0.AddArg(x) 15954 v.AddArg(v0) 15955 return true 15956 } 15957 // match: (ORshiftRA x (MOVDconst [c]) [d]) 15958 // result: (ORconst x [c>>uint64(d)]) 15959 for { 15960 d := auxIntToInt64(v.AuxInt) 15961 x := v_0 15962 if v_1.Op != OpARM64MOVDconst { 15963 break 15964 } 15965 c := auxIntToInt64(v_1.AuxInt) 15966 v.reset(OpARM64ORconst) 15967 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 15968 v.AddArg(x) 15969 return true 15970 } 15971 // match: (ORshiftRA y:(SRAconst x [c]) x [c]) 15972 // result: y 15973 for { 15974 c := auxIntToInt64(v.AuxInt) 15975 y := v_0 15976 if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c { 15977 break 15978 } 15979 x := y.Args[0] 15980 if x != v_1 { 15981 break 15982 } 15983 v.copyOf(y) 15984 return true 15985 } 15986 return false 15987 } 15988 func rewriteValueARM64_OpARM64ORshiftRL(v *Value) bool { 15989 v_1 := v.Args[1] 15990 v_0 := v.Args[0] 15991 b := v.Block 15992 // match: (ORshiftRL (MOVDconst [c]) x [d]) 15993 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 15994 for { 15995 d := auxIntToInt64(v.AuxInt) 15996 if v_0.Op != OpARM64MOVDconst { 15997 break 15998 } 15999 c := auxIntToInt64(v_0.AuxInt) 16000 x := v_1 16001 v.reset(OpARM64ORconst) 16002 v.AuxInt = int64ToAuxInt(c) 16003 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 16004 v0.AuxInt = int64ToAuxInt(d) 16005 v0.AddArg(x) 16006 v.AddArg(v0) 16007 return true 16008 } 16009 // match: (ORshiftRL x (MOVDconst [c]) [d]) 16010 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 16011 for { 16012 d := auxIntToInt64(v.AuxInt) 16013 x := v_0 16014 if v_1.Op != OpARM64MOVDconst { 16015 break 16016 } 16017 c := auxIntToInt64(v_1.AuxInt) 16018 v.reset(OpARM64ORconst) 16019 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 16020 v.AddArg(x) 16021 return 
true 16022 } 16023 // match: (ORshiftRL y:(SRLconst x [c]) x [c]) 16024 // result: y 16025 for { 16026 c := auxIntToInt64(v.AuxInt) 16027 y := v_0 16028 if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c { 16029 break 16030 } 16031 x := y.Args[0] 16032 if x != v_1 { 16033 break 16034 } 16035 v.copyOf(y) 16036 return true 16037 } 16038 // match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y)) 16039 // cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc)) 16040 // result: (BFI [armBFAuxInt(lc-rc, 64-lc)] x y) 16041 for { 16042 rc := auxIntToInt64(v.AuxInt) 16043 if v_0.Op != OpARM64ANDconst { 16044 break 16045 } 16046 ac := auxIntToInt64(v_0.AuxInt) 16047 x := v_0.Args[0] 16048 if v_1.Op != OpARM64SLLconst { 16049 break 16050 } 16051 lc := auxIntToInt64(v_1.AuxInt) 16052 y := v_1.Args[0] 16053 if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) { 16054 break 16055 } 16056 v.reset(OpARM64BFI) 16057 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc)) 16058 v.AddArg2(x, y) 16059 return true 16060 } 16061 // match: (ORshiftRL [rc] (ANDconst [ac] y) (SLLconst [lc] x)) 16062 // cond: lc < rc && ac == ^((1<<uint(64-rc)-1)) 16063 // result: (BFXIL [armBFAuxInt(rc-lc, 64-rc)] y x) 16064 for { 16065 rc := auxIntToInt64(v.AuxInt) 16066 if v_0.Op != OpARM64ANDconst { 16067 break 16068 } 16069 ac := auxIntToInt64(v_0.AuxInt) 16070 y := v_0.Args[0] 16071 if v_1.Op != OpARM64SLLconst { 16072 break 16073 } 16074 lc := auxIntToInt64(v_1.AuxInt) 16075 x := v_1.Args[0] 16076 if !(lc < rc && ac == ^(1<<uint(64-rc)-1)) { 16077 break 16078 } 16079 v.reset(OpARM64BFXIL) 16080 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc)) 16081 v.AddArg2(y, x) 16082 return true 16083 } 16084 return false 16085 } 16086 func rewriteValueARM64_OpARM64ORshiftRO(v *Value) bool { 16087 v_1 := v.Args[1] 16088 v_0 := v.Args[0] 16089 b := v.Block 16090 // match: (ORshiftRO (MOVDconst [c]) x [d]) 16091 // result: (ORconst [c] (RORconst <x.Type> x [d])) 16092 for { 
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c)
		// Materialize the rotate explicitly so the constant becomes
		// the immediate operand of the OR.
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRO x (MOVDconst [c]) [d])
	// result: (ORconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		// Rotate the constant at compile time instead of at run time.
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRO y:(RORconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		// y OR y == y: both operands rotate x by the same amount.
		v.copyOf(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64REV rewrites REV values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
func rewriteValueARM64_OpARM64REV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (REV (REV p))
	// result: p
	for {
		if v_0.Op != OpARM64REV {
			break
		}
		p := v_0.Args[0]
		// Two byte-reversals cancel out.
		v.copyOf(p)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64REVW rewrites REVW values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
func rewriteValueARM64_OpARM64REVW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (REVW (REVW p))
	// result: p
	for {
		if v_0.Op != OpARM64REVW {
			break
		}
		p := v_0.Args[0]
		// Two byte-reversals cancel out.
		v.copyOf(p)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ROR rewrites ROR values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [c]))
	// result: (RORconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		// A constant 64-bit rotate amount is taken mod 64.
		v.reset(OpARM64RORconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64RORW rewrites RORW values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
// Each for-block is one rule; the first matching rule rewrites v in
// place and returns true.
func rewriteValueARM64_OpARM64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [c]))
	// result: (RORWconst x [c&31])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		// A constant 32-bit rotate amount is taken mod 32.
		v.reset(OpARM64RORWconst)
		v.AuxInt = int64ToAuxInt(c & 31)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SBCSflags rewrites SBCSflags values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64SBCSflags(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> bo)))))
	// result: (SBCSflags x y bo)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64NEG || v_2_0_0.Type != typ.UInt64 {
			break
		}
		v_2_0_0_0 := v_2_0_0.Args[0]
		if v_2_0_0_0.Op != OpARM64NGCzerocarry || v_2_0_0_0.Type != typ.UInt64 {
			break
		}
		bo := v_2_0_0_0.Args[0]
		// The NEGSflags/NEG/NGCzerocarry chain just round-trips the
		// borrow; feed bo straight back into SBCSflags.
		v.reset(OpARM64SBCSflags)
		v.AddArg3(x, y, bo)
		return true
	}
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (MOVDconst [0]))))
	// result: (SUBSflags x y)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64SUBSflags)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLL rewrites SLL values.
// Generated from _gen/ARM64.rules (see file header); do not hand-edit.
// Each for-block is one rule; the first matching rule rewrites v in
// place and returns true.
func rewriteValueARM64_OpARM64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [c]))
	// result: (SLLconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		// A constant 64-bit shift amount is taken mod 64.
		v.reset(OpARM64SLLconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	// match: (SLL x (ANDconst [63] y))
	// result: (SLL x y)
	for {
		x := v_0
		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		// Masking the shift amount with 63 is redundant: the same
		// mod-64 reduction is applied by the rule above.
		v.reset(OpARM64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLLconst rewrites SLLconst values.
// Generated from _gen/ARM64.rules; rules are tried in order.
func rewriteValueARM64_OpARM64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		// Shift right then left by c just clears the low c bits.
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^(1<<uint(c) - 1))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVWreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt =
arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc))) 16326 v.AddArg(x) 16327 return true 16328 } 16329 // match: (SLLconst [lc] (MOVHreg x)) 16330 // result: (SBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x) 16331 for { 16332 lc := auxIntToInt64(v.AuxInt) 16333 if v_0.Op != OpARM64MOVHreg { 16334 break 16335 } 16336 x := v_0.Args[0] 16337 v.reset(OpARM64SBFIZ) 16338 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc))) 16339 v.AddArg(x) 16340 return true 16341 } 16342 // match: (SLLconst [lc] (MOVBreg x)) 16343 // result: (SBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x) 16344 for { 16345 lc := auxIntToInt64(v.AuxInt) 16346 if v_0.Op != OpARM64MOVBreg { 16347 break 16348 } 16349 x := v_0.Args[0] 16350 v.reset(OpARM64SBFIZ) 16351 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc))) 16352 v.AddArg(x) 16353 return true 16354 } 16355 // match: (SLLconst [lc] (MOVWUreg x)) 16356 // result: (UBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x) 16357 for { 16358 lc := auxIntToInt64(v.AuxInt) 16359 if v_0.Op != OpARM64MOVWUreg { 16360 break 16361 } 16362 x := v_0.Args[0] 16363 v.reset(OpARM64UBFIZ) 16364 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc))) 16365 v.AddArg(x) 16366 return true 16367 } 16368 // match: (SLLconst [lc] (MOVHUreg x)) 16369 // result: (UBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x) 16370 for { 16371 lc := auxIntToInt64(v.AuxInt) 16372 if v_0.Op != OpARM64MOVHUreg { 16373 break 16374 } 16375 x := v_0.Args[0] 16376 v.reset(OpARM64UBFIZ) 16377 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc))) 16378 v.AddArg(x) 16379 return true 16380 } 16381 // match: (SLLconst [lc] (MOVBUreg x)) 16382 // result: (UBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x) 16383 for { 16384 lc := auxIntToInt64(v.AuxInt) 16385 if v_0.Op != OpARM64MOVBUreg { 16386 break 16387 } 16388 x := v_0.Args[0] 16389 v.reset(OpARM64UBFIZ) 16390 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc))) 16391 v.AddArg(x) 16392 return true 16393 } 16394 // match: 
(SLLconst [sc] (ANDconst [ac] x)) 16395 // cond: isARM64BFMask(sc, ac, 0) 16396 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x) 16397 for { 16398 sc := auxIntToInt64(v.AuxInt) 16399 if v_0.Op != OpARM64ANDconst { 16400 break 16401 } 16402 ac := auxIntToInt64(v_0.AuxInt) 16403 x := v_0.Args[0] 16404 if !(isARM64BFMask(sc, ac, 0)) { 16405 break 16406 } 16407 v.reset(OpARM64UBFIZ) 16408 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0))) 16409 v.AddArg(x) 16410 return true 16411 } 16412 // match: (SLLconst [sc] (UBFIZ [bfc] x)) 16413 // cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64 16414 // result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x) 16415 for { 16416 sc := auxIntToInt64(v.AuxInt) 16417 if v_0.Op != OpARM64UBFIZ { 16418 break 16419 } 16420 bfc := auxIntToArm64BitField(v_0.AuxInt) 16421 x := v_0.Args[0] 16422 if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) { 16423 break 16424 } 16425 v.reset(OpARM64UBFIZ) 16426 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())) 16427 v.AddArg(x) 16428 return true 16429 } 16430 return false 16431 } 16432 func rewriteValueARM64_OpARM64SRA(v *Value) bool { 16433 v_1 := v.Args[1] 16434 v_0 := v.Args[0] 16435 // match: (SRA x (MOVDconst [c])) 16436 // result: (SRAconst x [c&63]) 16437 for { 16438 x := v_0 16439 if v_1.Op != OpARM64MOVDconst { 16440 break 16441 } 16442 c := auxIntToInt64(v_1.AuxInt) 16443 v.reset(OpARM64SRAconst) 16444 v.AuxInt = int64ToAuxInt(c & 63) 16445 v.AddArg(x) 16446 return true 16447 } 16448 // match: (SRA x (ANDconst [63] y)) 16449 // result: (SRA x y) 16450 for { 16451 x := v_0 16452 if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 { 16453 break 16454 } 16455 y := v_1.Args[0] 16456 v.reset(OpARM64SRA) 16457 v.AddArg2(x, y) 16458 return true 16459 } 16460 return false 16461 } 16462 func rewriteValueARM64_OpARM64SRAconst(v *Value) bool { 16463 v_0 := v.Args[0] 16464 // match: 
(SRAconst [c] (MOVDconst [d])) 16465 // result: (MOVDconst [d>>uint64(c)]) 16466 for { 16467 c := auxIntToInt64(v.AuxInt) 16468 if v_0.Op != OpARM64MOVDconst { 16469 break 16470 } 16471 d := auxIntToInt64(v_0.AuxInt) 16472 v.reset(OpARM64MOVDconst) 16473 v.AuxInt = int64ToAuxInt(d >> uint64(c)) 16474 return true 16475 } 16476 // match: (SRAconst [rc] (SLLconst [lc] x)) 16477 // cond: lc > rc 16478 // result: (SBFIZ [armBFAuxInt(lc-rc, 64-lc)] x) 16479 for { 16480 rc := auxIntToInt64(v.AuxInt) 16481 if v_0.Op != OpARM64SLLconst { 16482 break 16483 } 16484 lc := auxIntToInt64(v_0.AuxInt) 16485 x := v_0.Args[0] 16486 if !(lc > rc) { 16487 break 16488 } 16489 v.reset(OpARM64SBFIZ) 16490 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc)) 16491 v.AddArg(x) 16492 return true 16493 } 16494 // match: (SRAconst [rc] (SLLconst [lc] x)) 16495 // cond: lc <= rc 16496 // result: (SBFX [armBFAuxInt(rc-lc, 64-rc)] x) 16497 for { 16498 rc := auxIntToInt64(v.AuxInt) 16499 if v_0.Op != OpARM64SLLconst { 16500 break 16501 } 16502 lc := auxIntToInt64(v_0.AuxInt) 16503 x := v_0.Args[0] 16504 if !(lc <= rc) { 16505 break 16506 } 16507 v.reset(OpARM64SBFX) 16508 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc)) 16509 v.AddArg(x) 16510 return true 16511 } 16512 // match: (SRAconst [rc] (MOVWreg x)) 16513 // cond: rc < 32 16514 // result: (SBFX [armBFAuxInt(rc, 32-rc)] x) 16515 for { 16516 rc := auxIntToInt64(v.AuxInt) 16517 if v_0.Op != OpARM64MOVWreg { 16518 break 16519 } 16520 x := v_0.Args[0] 16521 if !(rc < 32) { 16522 break 16523 } 16524 v.reset(OpARM64SBFX) 16525 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc)) 16526 v.AddArg(x) 16527 return true 16528 } 16529 // match: (SRAconst [rc] (MOVHreg x)) 16530 // cond: rc < 16 16531 // result: (SBFX [armBFAuxInt(rc, 16-rc)] x) 16532 for { 16533 rc := auxIntToInt64(v.AuxInt) 16534 if v_0.Op != OpARM64MOVHreg { 16535 break 16536 } 16537 x := v_0.Args[0] 16538 if !(rc < 16) { 16539 break 16540 } 16541 
v.reset(OpARM64SBFX) 16542 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc)) 16543 v.AddArg(x) 16544 return true 16545 } 16546 // match: (SRAconst [rc] (MOVBreg x)) 16547 // cond: rc < 8 16548 // result: (SBFX [armBFAuxInt(rc, 8-rc)] x) 16549 for { 16550 rc := auxIntToInt64(v.AuxInt) 16551 if v_0.Op != OpARM64MOVBreg { 16552 break 16553 } 16554 x := v_0.Args[0] 16555 if !(rc < 8) { 16556 break 16557 } 16558 v.reset(OpARM64SBFX) 16559 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc)) 16560 v.AddArg(x) 16561 return true 16562 } 16563 // match: (SRAconst [sc] (SBFIZ [bfc] x)) 16564 // cond: sc < bfc.getARM64BFlsb() 16565 // result: (SBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x) 16566 for { 16567 sc := auxIntToInt64(v.AuxInt) 16568 if v_0.Op != OpARM64SBFIZ { 16569 break 16570 } 16571 bfc := auxIntToArm64BitField(v_0.AuxInt) 16572 x := v_0.Args[0] 16573 if !(sc < bfc.getARM64BFlsb()) { 16574 break 16575 } 16576 v.reset(OpARM64SBFIZ) 16577 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())) 16578 v.AddArg(x) 16579 return true 16580 } 16581 // match: (SRAconst [sc] (SBFIZ [bfc] x)) 16582 // cond: sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth() 16583 // result: (SBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x) 16584 for { 16585 sc := auxIntToInt64(v.AuxInt) 16586 if v_0.Op != OpARM64SBFIZ { 16587 break 16588 } 16589 bfc := auxIntToArm64BitField(v_0.AuxInt) 16590 x := v_0.Args[0] 16591 if !(sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) { 16592 break 16593 } 16594 v.reset(OpARM64SBFX) 16595 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)) 16596 v.AddArg(x) 16597 return true 16598 } 16599 return false 16600 } 16601 func rewriteValueARM64_OpARM64SRL(v *Value) bool { 16602 v_1 := v.Args[1] 16603 v_0 := v.Args[0] 16604 // match: (SRL x 
(MOVDconst [c])) 16605 // result: (SRLconst x [c&63]) 16606 for { 16607 x := v_0 16608 if v_1.Op != OpARM64MOVDconst { 16609 break 16610 } 16611 c := auxIntToInt64(v_1.AuxInt) 16612 v.reset(OpARM64SRLconst) 16613 v.AuxInt = int64ToAuxInt(c & 63) 16614 v.AddArg(x) 16615 return true 16616 } 16617 // match: (SRL x (ANDconst [63] y)) 16618 // result: (SRL x y) 16619 for { 16620 x := v_0 16621 if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 { 16622 break 16623 } 16624 y := v_1.Args[0] 16625 v.reset(OpARM64SRL) 16626 v.AddArg2(x, y) 16627 return true 16628 } 16629 return false 16630 } 16631 func rewriteValueARM64_OpARM64SRLconst(v *Value) bool { 16632 v_0 := v.Args[0] 16633 // match: (SRLconst [c] (MOVDconst [d])) 16634 // result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 16635 for { 16636 c := auxIntToInt64(v.AuxInt) 16637 if v_0.Op != OpARM64MOVDconst { 16638 break 16639 } 16640 d := auxIntToInt64(v_0.AuxInt) 16641 v.reset(OpARM64MOVDconst) 16642 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c))) 16643 return true 16644 } 16645 // match: (SRLconst [c] (SLLconst [c] x)) 16646 // cond: 0 < c && c < 64 16647 // result: (ANDconst [1<<uint(64-c)-1] x) 16648 for { 16649 c := auxIntToInt64(v.AuxInt) 16650 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c { 16651 break 16652 } 16653 x := v_0.Args[0] 16654 if !(0 < c && c < 64) { 16655 break 16656 } 16657 v.reset(OpARM64ANDconst) 16658 v.AuxInt = int64ToAuxInt(1<<uint(64-c) - 1) 16659 v.AddArg(x) 16660 return true 16661 } 16662 // match: (SRLconst [rc] (MOVWUreg x)) 16663 // cond: rc >= 32 16664 // result: (MOVDconst [0]) 16665 for { 16666 rc := auxIntToInt64(v.AuxInt) 16667 if v_0.Op != OpARM64MOVWUreg { 16668 break 16669 } 16670 if !(rc >= 32) { 16671 break 16672 } 16673 v.reset(OpARM64MOVDconst) 16674 v.AuxInt = int64ToAuxInt(0) 16675 return true 16676 } 16677 // match: (SRLconst [rc] (MOVHUreg x)) 16678 // cond: rc >= 16 16679 // result: (MOVDconst [0]) 16680 for { 16681 rc := 
auxIntToInt64(v.AuxInt) 16682 if v_0.Op != OpARM64MOVHUreg { 16683 break 16684 } 16685 if !(rc >= 16) { 16686 break 16687 } 16688 v.reset(OpARM64MOVDconst) 16689 v.AuxInt = int64ToAuxInt(0) 16690 return true 16691 } 16692 // match: (SRLconst [rc] (MOVBUreg x)) 16693 // cond: rc >= 8 16694 // result: (MOVDconst [0]) 16695 for { 16696 rc := auxIntToInt64(v.AuxInt) 16697 if v_0.Op != OpARM64MOVBUreg { 16698 break 16699 } 16700 if !(rc >= 8) { 16701 break 16702 } 16703 v.reset(OpARM64MOVDconst) 16704 v.AuxInt = int64ToAuxInt(0) 16705 return true 16706 } 16707 // match: (SRLconst [rc] (SLLconst [lc] x)) 16708 // cond: lc > rc 16709 // result: (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x) 16710 for { 16711 rc := auxIntToInt64(v.AuxInt) 16712 if v_0.Op != OpARM64SLLconst { 16713 break 16714 } 16715 lc := auxIntToInt64(v_0.AuxInt) 16716 x := v_0.Args[0] 16717 if !(lc > rc) { 16718 break 16719 } 16720 v.reset(OpARM64UBFIZ) 16721 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc)) 16722 v.AddArg(x) 16723 return true 16724 } 16725 // match: (SRLconst [rc] (SLLconst [lc] x)) 16726 // cond: lc < rc 16727 // result: (UBFX [armBFAuxInt(rc-lc, 64-rc)] x) 16728 for { 16729 rc := auxIntToInt64(v.AuxInt) 16730 if v_0.Op != OpARM64SLLconst { 16731 break 16732 } 16733 lc := auxIntToInt64(v_0.AuxInt) 16734 x := v_0.Args[0] 16735 if !(lc < rc) { 16736 break 16737 } 16738 v.reset(OpARM64UBFX) 16739 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc)) 16740 v.AddArg(x) 16741 return true 16742 } 16743 // match: (SRLconst [rc] (MOVWUreg x)) 16744 // cond: rc < 32 16745 // result: (UBFX [armBFAuxInt(rc, 32-rc)] x) 16746 for { 16747 rc := auxIntToInt64(v.AuxInt) 16748 if v_0.Op != OpARM64MOVWUreg { 16749 break 16750 } 16751 x := v_0.Args[0] 16752 if !(rc < 32) { 16753 break 16754 } 16755 v.reset(OpARM64UBFX) 16756 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc)) 16757 v.AddArg(x) 16758 return true 16759 } 16760 // match: (SRLconst [rc] (MOVHUreg x)) 16761 // cond: rc < 16 16762 
// result: (UBFX [armBFAuxInt(rc, 16-rc)] x) 16763 for { 16764 rc := auxIntToInt64(v.AuxInt) 16765 if v_0.Op != OpARM64MOVHUreg { 16766 break 16767 } 16768 x := v_0.Args[0] 16769 if !(rc < 16) { 16770 break 16771 } 16772 v.reset(OpARM64UBFX) 16773 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc)) 16774 v.AddArg(x) 16775 return true 16776 } 16777 // match: (SRLconst [rc] (MOVBUreg x)) 16778 // cond: rc < 8 16779 // result: (UBFX [armBFAuxInt(rc, 8-rc)] x) 16780 for { 16781 rc := auxIntToInt64(v.AuxInt) 16782 if v_0.Op != OpARM64MOVBUreg { 16783 break 16784 } 16785 x := v_0.Args[0] 16786 if !(rc < 8) { 16787 break 16788 } 16789 v.reset(OpARM64UBFX) 16790 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc)) 16791 v.AddArg(x) 16792 return true 16793 } 16794 // match: (SRLconst [sc] (ANDconst [ac] x)) 16795 // cond: isARM64BFMask(sc, ac, sc) 16796 // result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x) 16797 for { 16798 sc := auxIntToInt64(v.AuxInt) 16799 if v_0.Op != OpARM64ANDconst { 16800 break 16801 } 16802 ac := auxIntToInt64(v_0.AuxInt) 16803 x := v_0.Args[0] 16804 if !(isARM64BFMask(sc, ac, sc)) { 16805 break 16806 } 16807 v.reset(OpARM64UBFX) 16808 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc))) 16809 v.AddArg(x) 16810 return true 16811 } 16812 // match: (SRLconst [sc] (UBFX [bfc] x)) 16813 // cond: sc < bfc.getARM64BFwidth() 16814 // result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x) 16815 for { 16816 sc := auxIntToInt64(v.AuxInt) 16817 if v_0.Op != OpARM64UBFX { 16818 break 16819 } 16820 bfc := auxIntToArm64BitField(v_0.AuxInt) 16821 x := v_0.Args[0] 16822 if !(sc < bfc.getARM64BFwidth()) { 16823 break 16824 } 16825 v.reset(OpARM64UBFX) 16826 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)) 16827 v.AddArg(x) 16828 return true 16829 } 16830 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 16831 // cond: sc == bfc.getARM64BFlsb() 16832 // result: (ANDconst 
[1<<uint(bfc.getARM64BFwidth())-1] x) 16833 for { 16834 sc := auxIntToInt64(v.AuxInt) 16835 if v_0.Op != OpARM64UBFIZ { 16836 break 16837 } 16838 bfc := auxIntToArm64BitField(v_0.AuxInt) 16839 x := v_0.Args[0] 16840 if !(sc == bfc.getARM64BFlsb()) { 16841 break 16842 } 16843 v.reset(OpARM64ANDconst) 16844 v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1) 16845 v.AddArg(x) 16846 return true 16847 } 16848 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 16849 // cond: sc < bfc.getARM64BFlsb() 16850 // result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x) 16851 for { 16852 sc := auxIntToInt64(v.AuxInt) 16853 if v_0.Op != OpARM64UBFIZ { 16854 break 16855 } 16856 bfc := auxIntToArm64BitField(v_0.AuxInt) 16857 x := v_0.Args[0] 16858 if !(sc < bfc.getARM64BFlsb()) { 16859 break 16860 } 16861 v.reset(OpARM64UBFIZ) 16862 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())) 16863 v.AddArg(x) 16864 return true 16865 } 16866 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 16867 // cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth() 16868 // result: (UBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x) 16869 for { 16870 sc := auxIntToInt64(v.AuxInt) 16871 if v_0.Op != OpARM64UBFIZ { 16872 break 16873 } 16874 bfc := auxIntToArm64BitField(v_0.AuxInt) 16875 x := v_0.Args[0] 16876 if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) { 16877 break 16878 } 16879 v.reset(OpARM64UBFX) 16880 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)) 16881 v.AddArg(x) 16882 return true 16883 } 16884 return false 16885 } 16886 func rewriteValueARM64_OpARM64STP(v *Value) bool { 16887 v_3 := v.Args[3] 16888 v_2 := v.Args[2] 16889 v_1 := v.Args[1] 16890 v_0 := v.Args[0] 16891 b := v.Block 16892 config := b.Func.Config 16893 // match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 
val2 mem) 16894 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 16895 // result: (STP [off1+int32(off2)] {sym} ptr val1 val2 mem) 16896 for { 16897 off1 := auxIntToInt32(v.AuxInt) 16898 sym := auxToSym(v.Aux) 16899 if v_0.Op != OpARM64ADDconst { 16900 break 16901 } 16902 off2 := auxIntToInt64(v_0.AuxInt) 16903 ptr := v_0.Args[0] 16904 val1 := v_1 16905 val2 := v_2 16906 mem := v_3 16907 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 16908 break 16909 } 16910 v.reset(OpARM64STP) 16911 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 16912 v.Aux = symToAux(sym) 16913 v.AddArg4(ptr, val1, val2, mem) 16914 return true 16915 } 16916 // match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem) 16917 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink) 16918 // result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem) 16919 for { 16920 off1 := auxIntToInt32(v.AuxInt) 16921 sym1 := auxToSym(v.Aux) 16922 if v_0.Op != OpARM64MOVDaddr { 16923 break 16924 } 16925 off2 := auxIntToInt32(v_0.AuxInt) 16926 sym2 := auxToSym(v_0.Aux) 16927 ptr := v_0.Args[0] 16928 val1 := v_1 16929 val2 := v_2 16930 mem := v_3 16931 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) { 16932 break 16933 } 16934 v.reset(OpARM64STP) 16935 v.AuxInt = int32ToAuxInt(off1 + off2) 16936 v.Aux = symToAux(mergeSym(sym1, sym2)) 16937 v.AddArg4(ptr, val1, val2, mem) 16938 return true 16939 } 16940 // match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem) 16941 // result: (MOVQstorezero [off] {sym} ptr mem) 16942 for { 16943 off := auxIntToInt32(v.AuxInt) 16944 sym := auxToSym(v.Aux) 16945 ptr := v_0 16946 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 || v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 { 16947 break 16948 } 16949 mem := v_3 16950 
v.reset(OpARM64MOVQstorezero) 16951 v.AuxInt = int32ToAuxInt(off) 16952 v.Aux = symToAux(sym) 16953 v.AddArg2(ptr, mem) 16954 return true 16955 } 16956 return false 16957 } 16958 func rewriteValueARM64_OpARM64SUB(v *Value) bool { 16959 v_1 := v.Args[1] 16960 v_0 := v.Args[0] 16961 b := v.Block 16962 // match: (SUB x (MOVDconst [c])) 16963 // result: (SUBconst [c] x) 16964 for { 16965 x := v_0 16966 if v_1.Op != OpARM64MOVDconst { 16967 break 16968 } 16969 c := auxIntToInt64(v_1.AuxInt) 16970 v.reset(OpARM64SUBconst) 16971 v.AuxInt = int64ToAuxInt(c) 16972 v.AddArg(x) 16973 return true 16974 } 16975 // match: (SUB a l:(MUL x y)) 16976 // cond: l.Uses==1 && clobber(l) 16977 // result: (MSUB a x y) 16978 for { 16979 a := v_0 16980 l := v_1 16981 if l.Op != OpARM64MUL { 16982 break 16983 } 16984 y := l.Args[1] 16985 x := l.Args[0] 16986 if !(l.Uses == 1 && clobber(l)) { 16987 break 16988 } 16989 v.reset(OpARM64MSUB) 16990 v.AddArg3(a, x, y) 16991 return true 16992 } 16993 // match: (SUB a l:(MNEG x y)) 16994 // cond: l.Uses==1 && clobber(l) 16995 // result: (MADD a x y) 16996 for { 16997 a := v_0 16998 l := v_1 16999 if l.Op != OpARM64MNEG { 17000 break 17001 } 17002 y := l.Args[1] 17003 x := l.Args[0] 17004 if !(l.Uses == 1 && clobber(l)) { 17005 break 17006 } 17007 v.reset(OpARM64MADD) 17008 v.AddArg3(a, x, y) 17009 return true 17010 } 17011 // match: (SUB a l:(MULW x y)) 17012 // cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l) 17013 // result: (MSUBW a x y) 17014 for { 17015 a := v_0 17016 l := v_1 17017 if l.Op != OpARM64MULW { 17018 break 17019 } 17020 y := l.Args[1] 17021 x := l.Args[0] 17022 if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) { 17023 break 17024 } 17025 v.reset(OpARM64MSUBW) 17026 v.AddArg3(a, x, y) 17027 return true 17028 } 17029 // match: (SUB a l:(MNEGW x y)) 17030 // cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l) 17031 // result: (MADDW a x y) 17032 for { 17033 a := v_0 17034 l := v_1 17035 if l.Op != OpARM64MNEGW { 17036 break 
17037 } 17038 y := l.Args[1] 17039 x := l.Args[0] 17040 if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) { 17041 break 17042 } 17043 v.reset(OpARM64MADDW) 17044 v.AddArg3(a, x, y) 17045 return true 17046 } 17047 // match: (SUB x x) 17048 // result: (MOVDconst [0]) 17049 for { 17050 x := v_0 17051 if x != v_1 { 17052 break 17053 } 17054 v.reset(OpARM64MOVDconst) 17055 v.AuxInt = int64ToAuxInt(0) 17056 return true 17057 } 17058 // match: (SUB x (SUB y z)) 17059 // result: (SUB (ADD <v.Type> x z) y) 17060 for { 17061 x := v_0 17062 if v_1.Op != OpARM64SUB { 17063 break 17064 } 17065 z := v_1.Args[1] 17066 y := v_1.Args[0] 17067 v.reset(OpARM64SUB) 17068 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type) 17069 v0.AddArg2(x, z) 17070 v.AddArg2(v0, y) 17071 return true 17072 } 17073 // match: (SUB (SUB x y) z) 17074 // result: (SUB x (ADD <y.Type> y z)) 17075 for { 17076 if v_0.Op != OpARM64SUB { 17077 break 17078 } 17079 y := v_0.Args[1] 17080 x := v_0.Args[0] 17081 z := v_1 17082 v.reset(OpARM64SUB) 17083 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type) 17084 v0.AddArg2(y, z) 17085 v.AddArg2(x, v0) 17086 return true 17087 } 17088 // match: (SUB x0 x1:(SLLconst [c] y)) 17089 // cond: clobberIfDead(x1) 17090 // result: (SUBshiftLL x0 y [c]) 17091 for { 17092 x0 := v_0 17093 x1 := v_1 17094 if x1.Op != OpARM64SLLconst { 17095 break 17096 } 17097 c := auxIntToInt64(x1.AuxInt) 17098 y := x1.Args[0] 17099 if !(clobberIfDead(x1)) { 17100 break 17101 } 17102 v.reset(OpARM64SUBshiftLL) 17103 v.AuxInt = int64ToAuxInt(c) 17104 v.AddArg2(x0, y) 17105 return true 17106 } 17107 // match: (SUB x0 x1:(SRLconst [c] y)) 17108 // cond: clobberIfDead(x1) 17109 // result: (SUBshiftRL x0 y [c]) 17110 for { 17111 x0 := v_0 17112 x1 := v_1 17113 if x1.Op != OpARM64SRLconst { 17114 break 17115 } 17116 c := auxIntToInt64(x1.AuxInt) 17117 y := x1.Args[0] 17118 if !(clobberIfDead(x1)) { 17119 break 17120 } 17121 v.reset(OpARM64SUBshiftRL) 17122 v.AuxInt = int64ToAuxInt(c) 17123 v.AddArg2(x0, y) 17124 
return true 17125 } 17126 // match: (SUB x0 x1:(SRAconst [c] y)) 17127 // cond: clobberIfDead(x1) 17128 // result: (SUBshiftRA x0 y [c]) 17129 for { 17130 x0 := v_0 17131 x1 := v_1 17132 if x1.Op != OpARM64SRAconst { 17133 break 17134 } 17135 c := auxIntToInt64(x1.AuxInt) 17136 y := x1.Args[0] 17137 if !(clobberIfDead(x1)) { 17138 break 17139 } 17140 v.reset(OpARM64SUBshiftRA) 17141 v.AuxInt = int64ToAuxInt(c) 17142 v.AddArg2(x0, y) 17143 return true 17144 } 17145 return false 17146 } 17147 func rewriteValueARM64_OpARM64SUBconst(v *Value) bool { 17148 v_0 := v.Args[0] 17149 // match: (SUBconst [0] x) 17150 // result: x 17151 for { 17152 if auxIntToInt64(v.AuxInt) != 0 { 17153 break 17154 } 17155 x := v_0 17156 v.copyOf(x) 17157 return true 17158 } 17159 // match: (SUBconst [c] (MOVDconst [d])) 17160 // result: (MOVDconst [d-c]) 17161 for { 17162 c := auxIntToInt64(v.AuxInt) 17163 if v_0.Op != OpARM64MOVDconst { 17164 break 17165 } 17166 d := auxIntToInt64(v_0.AuxInt) 17167 v.reset(OpARM64MOVDconst) 17168 v.AuxInt = int64ToAuxInt(d - c) 17169 return true 17170 } 17171 // match: (SUBconst [c] (SUBconst [d] x)) 17172 // result: (ADDconst [-c-d] x) 17173 for { 17174 c := auxIntToInt64(v.AuxInt) 17175 if v_0.Op != OpARM64SUBconst { 17176 break 17177 } 17178 d := auxIntToInt64(v_0.AuxInt) 17179 x := v_0.Args[0] 17180 v.reset(OpARM64ADDconst) 17181 v.AuxInt = int64ToAuxInt(-c - d) 17182 v.AddArg(x) 17183 return true 17184 } 17185 // match: (SUBconst [c] (ADDconst [d] x)) 17186 // result: (ADDconst [-c+d] x) 17187 for { 17188 c := auxIntToInt64(v.AuxInt) 17189 if v_0.Op != OpARM64ADDconst { 17190 break 17191 } 17192 d := auxIntToInt64(v_0.AuxInt) 17193 x := v_0.Args[0] 17194 v.reset(OpARM64ADDconst) 17195 v.AuxInt = int64ToAuxInt(-c + d) 17196 v.AddArg(x) 17197 return true 17198 } 17199 return false 17200 } 17201 func rewriteValueARM64_OpARM64SUBshiftLL(v *Value) bool { 17202 v_1 := v.Args[1] 17203 v_0 := v.Args[0] 17204 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 17205 
// result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 17206 for { 17207 d := auxIntToInt64(v.AuxInt) 17208 x := v_0 17209 if v_1.Op != OpARM64MOVDconst { 17210 break 17211 } 17212 c := auxIntToInt64(v_1.AuxInt) 17213 v.reset(OpARM64SUBconst) 17214 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 17215 v.AddArg(x) 17216 return true 17217 } 17218 // match: (SUBshiftLL (SLLconst x [c]) x [c]) 17219 // result: (MOVDconst [0]) 17220 for { 17221 c := auxIntToInt64(v.AuxInt) 17222 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c { 17223 break 17224 } 17225 x := v_0.Args[0] 17226 if x != v_1 { 17227 break 17228 } 17229 v.reset(OpARM64MOVDconst) 17230 v.AuxInt = int64ToAuxInt(0) 17231 return true 17232 } 17233 return false 17234 } 17235 func rewriteValueARM64_OpARM64SUBshiftRA(v *Value) bool { 17236 v_1 := v.Args[1] 17237 v_0 := v.Args[0] 17238 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 17239 // result: (SUBconst x [c>>uint64(d)]) 17240 for { 17241 d := auxIntToInt64(v.AuxInt) 17242 x := v_0 17243 if v_1.Op != OpARM64MOVDconst { 17244 break 17245 } 17246 c := auxIntToInt64(v_1.AuxInt) 17247 v.reset(OpARM64SUBconst) 17248 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 17249 v.AddArg(x) 17250 return true 17251 } 17252 // match: (SUBshiftRA (SRAconst x [c]) x [c]) 17253 // result: (MOVDconst [0]) 17254 for { 17255 c := auxIntToInt64(v.AuxInt) 17256 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c { 17257 break 17258 } 17259 x := v_0.Args[0] 17260 if x != v_1 { 17261 break 17262 } 17263 v.reset(OpARM64MOVDconst) 17264 v.AuxInt = int64ToAuxInt(0) 17265 return true 17266 } 17267 return false 17268 } 17269 func rewriteValueARM64_OpARM64SUBshiftRL(v *Value) bool { 17270 v_1 := v.Args[1] 17271 v_0 := v.Args[0] 17272 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 17273 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 17274 for { 17275 d := auxIntToInt64(v.AuxInt) 17276 x := v_0 17277 if v_1.Op != OpARM64MOVDconst { 17278 break 17279 } 17280 c := 
auxIntToInt64(v_1.AuxInt) 17281 v.reset(OpARM64SUBconst) 17282 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 17283 v.AddArg(x) 17284 return true 17285 } 17286 // match: (SUBshiftRL (SRLconst x [c]) x [c]) 17287 // result: (MOVDconst [0]) 17288 for { 17289 c := auxIntToInt64(v.AuxInt) 17290 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c { 17291 break 17292 } 17293 x := v_0.Args[0] 17294 if x != v_1 { 17295 break 17296 } 17297 v.reset(OpARM64MOVDconst) 17298 v.AuxInt = int64ToAuxInt(0) 17299 return true 17300 } 17301 return false 17302 } 17303 func rewriteValueARM64_OpARM64TST(v *Value) bool { 17304 v_1 := v.Args[1] 17305 v_0 := v.Args[0] 17306 // match: (TST x (MOVDconst [c])) 17307 // result: (TSTconst [c] x) 17308 for { 17309 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17310 x := v_0 17311 if v_1.Op != OpARM64MOVDconst { 17312 continue 17313 } 17314 c := auxIntToInt64(v_1.AuxInt) 17315 v.reset(OpARM64TSTconst) 17316 v.AuxInt = int64ToAuxInt(c) 17317 v.AddArg(x) 17318 return true 17319 } 17320 break 17321 } 17322 // match: (TST x0 x1:(SLLconst [c] y)) 17323 // cond: clobberIfDead(x1) 17324 // result: (TSTshiftLL x0 y [c]) 17325 for { 17326 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17327 x0 := v_0 17328 x1 := v_1 17329 if x1.Op != OpARM64SLLconst { 17330 continue 17331 } 17332 c := auxIntToInt64(x1.AuxInt) 17333 y := x1.Args[0] 17334 if !(clobberIfDead(x1)) { 17335 continue 17336 } 17337 v.reset(OpARM64TSTshiftLL) 17338 v.AuxInt = int64ToAuxInt(c) 17339 v.AddArg2(x0, y) 17340 return true 17341 } 17342 break 17343 } 17344 // match: (TST x0 x1:(SRLconst [c] y)) 17345 // cond: clobberIfDead(x1) 17346 // result: (TSTshiftRL x0 y [c]) 17347 for { 17348 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17349 x0 := v_0 17350 x1 := v_1 17351 if x1.Op != OpARM64SRLconst { 17352 continue 17353 } 17354 c := auxIntToInt64(x1.AuxInt) 17355 y := x1.Args[0] 17356 if !(clobberIfDead(x1)) { 17357 continue 17358 } 17359 
v.reset(OpARM64TSTshiftRL) 17360 v.AuxInt = int64ToAuxInt(c) 17361 v.AddArg2(x0, y) 17362 return true 17363 } 17364 break 17365 } 17366 // match: (TST x0 x1:(SRAconst [c] y)) 17367 // cond: clobberIfDead(x1) 17368 // result: (TSTshiftRA x0 y [c]) 17369 for { 17370 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17371 x0 := v_0 17372 x1 := v_1 17373 if x1.Op != OpARM64SRAconst { 17374 continue 17375 } 17376 c := auxIntToInt64(x1.AuxInt) 17377 y := x1.Args[0] 17378 if !(clobberIfDead(x1)) { 17379 continue 17380 } 17381 v.reset(OpARM64TSTshiftRA) 17382 v.AuxInt = int64ToAuxInt(c) 17383 v.AddArg2(x0, y) 17384 return true 17385 } 17386 break 17387 } 17388 // match: (TST x0 x1:(RORconst [c] y)) 17389 // cond: clobberIfDead(x1) 17390 // result: (TSTshiftRO x0 y [c]) 17391 for { 17392 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17393 x0 := v_0 17394 x1 := v_1 17395 if x1.Op != OpARM64RORconst { 17396 continue 17397 } 17398 c := auxIntToInt64(x1.AuxInt) 17399 y := x1.Args[0] 17400 if !(clobberIfDead(x1)) { 17401 continue 17402 } 17403 v.reset(OpARM64TSTshiftRO) 17404 v.AuxInt = int64ToAuxInt(c) 17405 v.AddArg2(x0, y) 17406 return true 17407 } 17408 break 17409 } 17410 return false 17411 } 17412 func rewriteValueARM64_OpARM64TSTW(v *Value) bool { 17413 v_1 := v.Args[1] 17414 v_0 := v.Args[0] 17415 // match: (TSTW x (MOVDconst [c])) 17416 // result: (TSTWconst [int32(c)] x) 17417 for { 17418 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 17419 x := v_0 17420 if v_1.Op != OpARM64MOVDconst { 17421 continue 17422 } 17423 c := auxIntToInt64(v_1.AuxInt) 17424 v.reset(OpARM64TSTWconst) 17425 v.AuxInt = int32ToAuxInt(int32(c)) 17426 v.AddArg(x) 17427 return true 17428 } 17429 break 17430 } 17431 return false 17432 } 17433 func rewriteValueARM64_OpARM64TSTWconst(v *Value) bool { 17434 v_0 := v.Args[0] 17435 // match: (TSTWconst (MOVDconst [x]) [y]) 17436 // result: (FlagConstant [logicFlags32(int32(x)&y)]) 17437 for { 17438 y := 
auxIntToInt32(v.AuxInt) 17439 if v_0.Op != OpARM64MOVDconst { 17440 break 17441 } 17442 x := auxIntToInt64(v_0.AuxInt) 17443 v.reset(OpARM64FlagConstant) 17444 v.AuxInt = flagConstantToAuxInt(logicFlags32(int32(x) & y)) 17445 return true 17446 } 17447 return false 17448 } 17449 func rewriteValueARM64_OpARM64TSTconst(v *Value) bool { 17450 v_0 := v.Args[0] 17451 // match: (TSTconst (MOVDconst [x]) [y]) 17452 // result: (FlagConstant [logicFlags64(x&y)]) 17453 for { 17454 y := auxIntToInt64(v.AuxInt) 17455 if v_0.Op != OpARM64MOVDconst { 17456 break 17457 } 17458 x := auxIntToInt64(v_0.AuxInt) 17459 v.reset(OpARM64FlagConstant) 17460 v.AuxInt = flagConstantToAuxInt(logicFlags64(x & y)) 17461 return true 17462 } 17463 return false 17464 } 17465 func rewriteValueARM64_OpARM64TSTshiftLL(v *Value) bool { 17466 v_1 := v.Args[1] 17467 v_0 := v.Args[0] 17468 b := v.Block 17469 // match: (TSTshiftLL (MOVDconst [c]) x [d]) 17470 // result: (TSTconst [c] (SLLconst <x.Type> x [d])) 17471 for { 17472 d := auxIntToInt64(v.AuxInt) 17473 if v_0.Op != OpARM64MOVDconst { 17474 break 17475 } 17476 c := auxIntToInt64(v_0.AuxInt) 17477 x := v_1 17478 v.reset(OpARM64TSTconst) 17479 v.AuxInt = int64ToAuxInt(c) 17480 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 17481 v0.AuxInt = int64ToAuxInt(d) 17482 v0.AddArg(x) 17483 v.AddArg(v0) 17484 return true 17485 } 17486 // match: (TSTshiftLL x (MOVDconst [c]) [d]) 17487 // result: (TSTconst x [int64(uint64(c)<<uint64(d))]) 17488 for { 17489 d := auxIntToInt64(v.AuxInt) 17490 x := v_0 17491 if v_1.Op != OpARM64MOVDconst { 17492 break 17493 } 17494 c := auxIntToInt64(v_1.AuxInt) 17495 v.reset(OpARM64TSTconst) 17496 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d))) 17497 v.AddArg(x) 17498 return true 17499 } 17500 return false 17501 } 17502 func rewriteValueARM64_OpARM64TSTshiftRA(v *Value) bool { 17503 v_1 := v.Args[1] 17504 v_0 := v.Args[0] 17505 b := v.Block 17506 // match: (TSTshiftRA (MOVDconst [c]) x [d]) 17507 // result: (TSTconst 
[c] (SRAconst <x.Type> x [d])) 17508 for { 17509 d := auxIntToInt64(v.AuxInt) 17510 if v_0.Op != OpARM64MOVDconst { 17511 break 17512 } 17513 c := auxIntToInt64(v_0.AuxInt) 17514 x := v_1 17515 v.reset(OpARM64TSTconst) 17516 v.AuxInt = int64ToAuxInt(c) 17517 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 17518 v0.AuxInt = int64ToAuxInt(d) 17519 v0.AddArg(x) 17520 v.AddArg(v0) 17521 return true 17522 } 17523 // match: (TSTshiftRA x (MOVDconst [c]) [d]) 17524 // result: (TSTconst x [c>>uint64(d)]) 17525 for { 17526 d := auxIntToInt64(v.AuxInt) 17527 x := v_0 17528 if v_1.Op != OpARM64MOVDconst { 17529 break 17530 } 17531 c := auxIntToInt64(v_1.AuxInt) 17532 v.reset(OpARM64TSTconst) 17533 v.AuxInt = int64ToAuxInt(c >> uint64(d)) 17534 v.AddArg(x) 17535 return true 17536 } 17537 return false 17538 } 17539 func rewriteValueARM64_OpARM64TSTshiftRL(v *Value) bool { 17540 v_1 := v.Args[1] 17541 v_0 := v.Args[0] 17542 b := v.Block 17543 // match: (TSTshiftRL (MOVDconst [c]) x [d]) 17544 // result: (TSTconst [c] (SRLconst <x.Type> x [d])) 17545 for { 17546 d := auxIntToInt64(v.AuxInt) 17547 if v_0.Op != OpARM64MOVDconst { 17548 break 17549 } 17550 c := auxIntToInt64(v_0.AuxInt) 17551 x := v_1 17552 v.reset(OpARM64TSTconst) 17553 v.AuxInt = int64ToAuxInt(c) 17554 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 17555 v0.AuxInt = int64ToAuxInt(d) 17556 v0.AddArg(x) 17557 v.AddArg(v0) 17558 return true 17559 } 17560 // match: (TSTshiftRL x (MOVDconst [c]) [d]) 17561 // result: (TSTconst x [int64(uint64(c)>>uint64(d))]) 17562 for { 17563 d := auxIntToInt64(v.AuxInt) 17564 x := v_0 17565 if v_1.Op != OpARM64MOVDconst { 17566 break 17567 } 17568 c := auxIntToInt64(v_1.AuxInt) 17569 v.reset(OpARM64TSTconst) 17570 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d))) 17571 v.AddArg(x) 17572 return true 17573 } 17574 return false 17575 } 17576 func rewriteValueARM64_OpARM64TSTshiftRO(v *Value) bool { 17577 v_1 := v.Args[1] 17578 v_0 := v.Args[0] 17579 b := v.Block 17580 // 
match: (TSTshiftRO (MOVDconst [c]) x [d]) 17581 // result: (TSTconst [c] (RORconst <x.Type> x [d])) 17582 for { 17583 d := auxIntToInt64(v.AuxInt) 17584 if v_0.Op != OpARM64MOVDconst { 17585 break 17586 } 17587 c := auxIntToInt64(v_0.AuxInt) 17588 x := v_1 17589 v.reset(OpARM64TSTconst) 17590 v.AuxInt = int64ToAuxInt(c) 17591 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type) 17592 v0.AuxInt = int64ToAuxInt(d) 17593 v0.AddArg(x) 17594 v.AddArg(v0) 17595 return true 17596 } 17597 // match: (TSTshiftRO x (MOVDconst [c]) [d]) 17598 // result: (TSTconst x [rotateRight64(c, d)]) 17599 for { 17600 d := auxIntToInt64(v.AuxInt) 17601 x := v_0 17602 if v_1.Op != OpARM64MOVDconst { 17603 break 17604 } 17605 c := auxIntToInt64(v_1.AuxInt) 17606 v.reset(OpARM64TSTconst) 17607 v.AuxInt = int64ToAuxInt(rotateRight64(c, d)) 17608 v.AddArg(x) 17609 return true 17610 } 17611 return false 17612 } 17613 func rewriteValueARM64_OpARM64UBFIZ(v *Value) bool { 17614 v_0 := v.Args[0] 17615 // match: (UBFIZ [bfc] (SLLconst [sc] x)) 17616 // cond: sc < bfc.getARM64BFwidth() 17617 // result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x) 17618 for { 17619 bfc := auxIntToArm64BitField(v.AuxInt) 17620 if v_0.Op != OpARM64SLLconst { 17621 break 17622 } 17623 sc := auxIntToInt64(v_0.AuxInt) 17624 x := v_0.Args[0] 17625 if !(sc < bfc.getARM64BFwidth()) { 17626 break 17627 } 17628 v.reset(OpARM64UBFIZ) 17629 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)) 17630 v.AddArg(x) 17631 return true 17632 } 17633 return false 17634 } 17635 func rewriteValueARM64_OpARM64UBFX(v *Value) bool { 17636 v_0 := v.Args[0] 17637 // match: (UBFX [bfc] (ANDconst [c] x)) 17638 // cond: isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb() + bfc.getARM64BFwidth() <= arm64BFWidth(c, 0) 17639 // result: (UBFX [bfc] x) 17640 for { 17641 bfc := auxIntToArm64BitField(v.AuxInt) 17642 if v_0.Op != OpARM64ANDconst { 17643 break 17644 } 17645 c := 
auxIntToInt64(v_0.AuxInt) 17646 x := v_0.Args[0] 17647 if !(isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb()+bfc.getARM64BFwidth() <= arm64BFWidth(c, 0)) { 17648 break 17649 } 17650 v.reset(OpARM64UBFX) 17651 v.AuxInt = arm64BitFieldToAuxInt(bfc) 17652 v.AddArg(x) 17653 return true 17654 } 17655 // match: (UBFX [bfc] (SRLconst [sc] x)) 17656 // cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64 17657 // result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x) 17658 for { 17659 bfc := auxIntToArm64BitField(v.AuxInt) 17660 if v_0.Op != OpARM64SRLconst { 17661 break 17662 } 17663 sc := auxIntToInt64(v_0.AuxInt) 17664 x := v_0.Args[0] 17665 if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) { 17666 break 17667 } 17668 v.reset(OpARM64UBFX) 17669 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())) 17670 v.AddArg(x) 17671 return true 17672 } 17673 // match: (UBFX [bfc] (SLLconst [sc] x)) 17674 // cond: sc == bfc.getARM64BFlsb() 17675 // result: (ANDconst [1<<uint(bfc.getARM64BFwidth())-1] x) 17676 for { 17677 bfc := auxIntToArm64BitField(v.AuxInt) 17678 if v_0.Op != OpARM64SLLconst { 17679 break 17680 } 17681 sc := auxIntToInt64(v_0.AuxInt) 17682 x := v_0.Args[0] 17683 if !(sc == bfc.getARM64BFlsb()) { 17684 break 17685 } 17686 v.reset(OpARM64ANDconst) 17687 v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1) 17688 v.AddArg(x) 17689 return true 17690 } 17691 // match: (UBFX [bfc] (SLLconst [sc] x)) 17692 // cond: sc < bfc.getARM64BFlsb() 17693 // result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x) 17694 for { 17695 bfc := auxIntToArm64BitField(v.AuxInt) 17696 if v_0.Op != OpARM64SLLconst { 17697 break 17698 } 17699 sc := auxIntToInt64(v_0.AuxInt) 17700 x := v_0.Args[0] 17701 if !(sc < bfc.getARM64BFlsb()) { 17702 break 17703 } 17704 v.reset(OpARM64UBFX) 17705 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())) 17706 v.AddArg(x) 
17707 return true 17708 } 17709 // match: (UBFX [bfc] (SLLconst [sc] x)) 17710 // cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth() 17711 // result: (UBFIZ [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x) 17712 for { 17713 bfc := auxIntToArm64BitField(v.AuxInt) 17714 if v_0.Op != OpARM64SLLconst { 17715 break 17716 } 17717 sc := auxIntToInt64(v_0.AuxInt) 17718 x := v_0.Args[0] 17719 if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) { 17720 break 17721 } 17722 v.reset(OpARM64UBFIZ) 17723 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)) 17724 v.AddArg(x) 17725 return true 17726 } 17727 return false 17728 } 17729 func rewriteValueARM64_OpARM64UDIV(v *Value) bool { 17730 v_1 := v.Args[1] 17731 v_0 := v.Args[0] 17732 // match: (UDIV x (MOVDconst [1])) 17733 // result: x 17734 for { 17735 x := v_0 17736 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 17737 break 17738 } 17739 v.copyOf(x) 17740 return true 17741 } 17742 // match: (UDIV x (MOVDconst [c])) 17743 // cond: isPowerOfTwo64(c) 17744 // result: (SRLconst [log64(c)] x) 17745 for { 17746 x := v_0 17747 if v_1.Op != OpARM64MOVDconst { 17748 break 17749 } 17750 c := auxIntToInt64(v_1.AuxInt) 17751 if !(isPowerOfTwo64(c)) { 17752 break 17753 } 17754 v.reset(OpARM64SRLconst) 17755 v.AuxInt = int64ToAuxInt(log64(c)) 17756 v.AddArg(x) 17757 return true 17758 } 17759 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 17760 // cond: d != 0 17761 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 17762 for { 17763 if v_0.Op != OpARM64MOVDconst { 17764 break 17765 } 17766 c := auxIntToInt64(v_0.AuxInt) 17767 if v_1.Op != OpARM64MOVDconst { 17768 break 17769 } 17770 d := auxIntToInt64(v_1.AuxInt) 17771 if !(d != 0) { 17772 break 17773 } 17774 v.reset(OpARM64MOVDconst) 17775 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d))) 17776 return true 
17777 } 17778 return false 17779 } 17780 func rewriteValueARM64_OpARM64UDIVW(v *Value) bool { 17781 v_1 := v.Args[1] 17782 v_0 := v.Args[0] 17783 b := v.Block 17784 // match: (UDIVW x (MOVDconst [c])) 17785 // cond: uint32(c)==1 17786 // result: (MOVWUreg x) 17787 for { 17788 x := v_0 17789 if v_1.Op != OpARM64MOVDconst { 17790 break 17791 } 17792 c := auxIntToInt64(v_1.AuxInt) 17793 if !(uint32(c) == 1) { 17794 break 17795 } 17796 v.reset(OpARM64MOVWUreg) 17797 v.AddArg(x) 17798 return true 17799 } 17800 // match: (UDIVW x (MOVDconst [c])) 17801 // cond: isPowerOfTwo64(c) && is32Bit(c) 17802 // result: (SRLconst [log64(c)] (MOVWUreg <v.Type> x)) 17803 for { 17804 x := v_0 17805 if v_1.Op != OpARM64MOVDconst { 17806 break 17807 } 17808 c := auxIntToInt64(v_1.AuxInt) 17809 if !(isPowerOfTwo64(c) && is32Bit(c)) { 17810 break 17811 } 17812 v.reset(OpARM64SRLconst) 17813 v.AuxInt = int64ToAuxInt(log64(c)) 17814 v0 := b.NewValue0(v.Pos, OpARM64MOVWUreg, v.Type) 17815 v0.AddArg(x) 17816 v.AddArg(v0) 17817 return true 17818 } 17819 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 17820 // cond: d != 0 17821 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 17822 for { 17823 if v_0.Op != OpARM64MOVDconst { 17824 break 17825 } 17826 c := auxIntToInt64(v_0.AuxInt) 17827 if v_1.Op != OpARM64MOVDconst { 17828 break 17829 } 17830 d := auxIntToInt64(v_1.AuxInt) 17831 if !(d != 0) { 17832 break 17833 } 17834 v.reset(OpARM64MOVDconst) 17835 v.AuxInt = int64ToAuxInt(int64(uint32(c) / uint32(d))) 17836 return true 17837 } 17838 return false 17839 } 17840 func rewriteValueARM64_OpARM64UMOD(v *Value) bool { 17841 v_1 := v.Args[1] 17842 v_0 := v.Args[0] 17843 b := v.Block 17844 typ := &b.Func.Config.Types 17845 // match: (UMOD <typ.UInt64> x y) 17846 // result: (MSUB <typ.UInt64> x y (UDIV <typ.UInt64> x y)) 17847 for { 17848 if v.Type != typ.UInt64 { 17849 break 17850 } 17851 x := v_0 17852 y := v_1 17853 v.reset(OpARM64MSUB) 17854 v.Type = typ.UInt64 17855 v0 := b.NewValue0(v.Pos, 
			OpARM64UDIV, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMOD _ (MOVDconst [1]))
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (UMOD x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64UMODW lowers 32-bit unsigned modulus to
// MSUBW(x, y, UDIVW(x, y)) and folds constant cases. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64UMODW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (UMODW <typ.UInt32> x y)
	// result: (MSUBW <typ.UInt32> x y (UDIVW <typ.UInt32> x y))
	for {
		if v.Type != typ.UInt32 {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64MSUBW)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMODW _ (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (UMODW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c) && is32Bit(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(c)%uint32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c) % uint32(d)))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XOR folds constants into XORconst, cancels x^x,
// fuses MVN into EON, and absorbs shift operands into the XORshift* forms.
// The inner _i0 loops try both operand orders (XOR is commutative).
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVDconst [c]))
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (XOR x (MVN y))
	// result: (EON x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64EON)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}

// rewriteValueARM64_OpARM64XORconst simplifies XOR-with-constant: identity
// (c==0), complement (c==-1), and constant folding/merging. Reports whether
// v was rewritten.
func rewriteValueARM64_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (MVN x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftLL folds constants through the shifted XOR,
// cancels (x<<c)^(x<<c), recognizes byte-swap (REV16/REV16W) idioms, and forms
// EXTR/EXTRW rotates. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64XORshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (XORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REV16W x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
	// result: (REV16 x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(0xffffffff)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c :=
			auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = int64ToAuxInt(64 - c)
		v.AddArg2(x2, x)
		return true
	}
	// match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = int64ToAuxInt(32 - c)
		v.AddArg2(x2, x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftRA folds constants through the
// arithmetic-shift XOR and cancels (x>>c)^(x>>c). Reports whether v was rewritten.
func rewriteValueARM64_OpARM64XORshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRA (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c :=
			auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftRL folds constants through the
// logical-shift XOR and cancels (x>>>c)^(x>>>c). Reports whether v was rewritten.
func rewriteValueARM64_OpARM64XORshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftRO folds constants through the
// rotate XOR and cancels (ror(x,c))^(ror(x,c)). Reports whether v was rewritten.
func rewriteValueARM64_OpARM64XORshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRO (MOVDconst [c]) x [d])
	// result: (XORconst [c] (RORconst <x.Type> x [d]))
	for {
		d :=
			auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}

// rewriteValueARM64_OpAddr lowers the generic Addr op to MOVDaddr.
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVDaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd32 lowers AtomicAnd32 to the tuple-producing
// LoweredAtomicAnd32, selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicAnd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd32 ptr val mem)
	// result: (Select1 (LoweredAtomicAnd32 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd32Variant lowers to LoweredAtomicAnd32Variant
// (the LSE-style variant op), selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicAnd32Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd32Variant ptr val mem)
	// result: (Select1 (LoweredAtomicAnd32Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd8 lowers AtomicAnd8 to LoweredAtomicAnd8,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (Select1 (LoweredAtomicAnd8 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd8Variant lowers to LoweredAtomicAnd8Variant,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicAnd8Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8Variant ptr val mem)
	// result: (Select1 (LoweredAtomicAnd8Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr32 lowers AtomicOr32 to LoweredAtomicOr32,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicOr32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr32 ptr val mem)
	// result: (Select1 (LoweredAtomicOr32 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr32Variant lowers to LoweredAtomicOr32Variant,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicOr32Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr32Variant ptr val mem)
	// result: (Select1 (LoweredAtomicOr32Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr8 lowers AtomicOr8 to LoweredAtomicOr8,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (Select1 (LoweredAtomicOr8 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr8Variant lowers to LoweredAtomicOr8Variant,
// selecting the memory result. Always returns true.
func rewriteValueARM64_OpAtomicOr8Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8Variant ptr val mem)
	// result: (Select1 (LoweredAtomicOr8Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAvg64u lowers Avg64u as ((x-y)>>1)+y, which is
// overflow-safe for unsigned inputs. Always returns true.
func rewriteValueARM64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}

// rewriteValueARM64_OpBitLen32 lowers BitLen32 as 32 - CLZW(x).
// Always returns true.
func rewriteValueARM64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 x)
	// result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueARM64_OpBitLen64 lowers BitLen64 as 64 - CLZ(x).
// Always returns true.
func rewriteValueARM64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 x)
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueARM64_OpBitRev16 lowers BitRev16 as RBIT(x) >> 48
// (bit-reverse the full register, then drop the unused low bits).
// Always returns true.
func rewriteValueARM64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev16 x)
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func
rewriteValueARM64_OpBitRev8(v *Value) bool {
	// BitRev8 lowers as RBIT(x) >> 56, mirroring BitRev16 above.
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev8 x)
	// result: (SRLconst [56] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(56)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpCondSelect lowers CondSelect to CSEL: if the boolean
// is itself derived from flags, select on those flags directly; otherwise
// test bit 0 of the boolean with TSTWconst. Reports whether v was rewritten.
func rewriteValueARM64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) != nil
	// result: (CSEL [boolval.Op] x y flagArg(boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) == nil
	// result: (CSEL [OpARM64NotEqual] x y (TSTWconst [1] boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) == nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg(boolval)
		v.AddArg3(x, y, v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpConst16 materializes a 16-bit constant as MOVDconst.
// Always returns true.
func rewriteValueARM64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueARM64_OpConst32 materializes a 32-bit constant as MOVDconst.
// Always returns true.
func rewriteValueARM64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueARM64_OpConst32F materializes a float32 constant as FMOVSconst.
// Always returns true.
func rewriteValueARM64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (FMOVSconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}

// rewriteValueARM64_OpConst64 materializes a 64-bit constant as MOVDconst.
// Always returns true.
func rewriteValueARM64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueARM64_OpConst64F materializes a float64 constant as FMOVDconst.
// Always returns true.
func rewriteValueARM64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (FMOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}

// rewriteValueARM64_OpConst8 materializes an 8-bit constant as MOVDconst.
// Always returns true.
func rewriteValueARM64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueARM64_OpConstBool materializes a boolean constant as
// MOVDconst 0/1 via b2i. Always returns true.
func rewriteValueARM64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVDconst [b2i(t)])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(t))
		return true
	}
}

// rewriteValueARM64_OpConstNil materializes nil as MOVDconst 0.
// Always returns true.
func rewriteValueARM64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}

// rewriteValueARM64_OpCtz16 lowers Ctz16 via bit-reverse + count-leading-zeros;
// ORing in 0x10000 guarantees a set bit so the zero input yields 16.
// Always returns true.
func rewriteValueARM64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x10000)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpCtz32 lowers Ctz32 as CLZW(RBITW(x)).
// Always returns true.
func rewriteValueARM64_OpCtz32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz32 <t> x)
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpCtz64 lowers Ctz64 as CLZ(RBIT(x)).
// Always returns true.
func rewriteValueARM64_OpCtz64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz64 <t> x)
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpCtz8 lowers Ctz8 like Ctz16, with sentinel bit 0x100
// so the zero input yields 8. Always returns true.
func rewriteValueARM64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x100)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpDiv16 lowers non-checked 16-bit signed division to
// DIVW on sign-extended operands. Reports whether v was rewritten.
func rewriteValueARM64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 [false] x y)
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y :=
			v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpDiv16u lowers 16-bit unsigned division to UDIVW on
// zero-extended operands. Always returns true.
func rewriteValueARM64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueARM64_OpDiv32 lowers non-checked 32-bit signed division to DIVW.
// Reports whether v was rewritten.
func rewriteValueARM64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 [false] x y)
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpDiv64 lowers non-checked 64-bit signed division to DIV.
// Reports whether v was rewritten.
func rewriteValueARM64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 [false] x y)
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpDiv8 lowers 8-bit signed division to DIVW on
// sign-extended operands. Always returns true.
func rewriteValueARM64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueARM64_OpDiv8u lowers 8-bit unsigned division to UDIVW on
// zero-extended operands. Always returns true.
func rewriteValueARM64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueARM64_OpEq16 lowers Eq16 to Equal over a 32-bit compare of
// zero-extended operands. Always returns true.
func rewriteValueARM64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpEq32 lowers Eq32 to Equal(CMPW x y).
// Always returns true.
func rewriteValueARM64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32 x y)
	// result: (Equal (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpEq32F lowers Eq32F to Equal(FCMPS x y).
// Always returns true.
func rewriteValueARM64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (Equal (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
func
rewriteValueARM64_OpEq64(v *Value) bool { 19107 v_1 := v.Args[1] 19108 v_0 := v.Args[0] 19109 b := v.Block 19110 // match: (Eq64 x y) 19111 // result: (Equal (CMP x y)) 19112 for { 19113 x := v_0 19114 y := v_1 19115 v.reset(OpARM64Equal) 19116 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19117 v0.AddArg2(x, y) 19118 v.AddArg(v0) 19119 return true 19120 } 19121 } 19122 func rewriteValueARM64_OpEq64F(v *Value) bool { 19123 v_1 := v.Args[1] 19124 v_0 := v.Args[0] 19125 b := v.Block 19126 // match: (Eq64F x y) 19127 // result: (Equal (FCMPD x y)) 19128 for { 19129 x := v_0 19130 y := v_1 19131 v.reset(OpARM64Equal) 19132 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 19133 v0.AddArg2(x, y) 19134 v.AddArg(v0) 19135 return true 19136 } 19137 } 19138 func rewriteValueARM64_OpEq8(v *Value) bool { 19139 v_1 := v.Args[1] 19140 v_0 := v.Args[0] 19141 b := v.Block 19142 typ := &b.Func.Config.Types 19143 // match: (Eq8 x y) 19144 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 19145 for { 19146 x := v_0 19147 y := v_1 19148 v.reset(OpARM64Equal) 19149 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19150 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 19151 v1.AddArg(x) 19152 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 19153 v2.AddArg(y) 19154 v0.AddArg2(v1, v2) 19155 v.AddArg(v0) 19156 return true 19157 } 19158 } 19159 func rewriteValueARM64_OpEqB(v *Value) bool { 19160 v_1 := v.Args[1] 19161 v_0 := v.Args[0] 19162 b := v.Block 19163 typ := &b.Func.Config.Types 19164 // match: (EqB x y) 19165 // result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y)) 19166 for { 19167 x := v_0 19168 y := v_1 19169 v.reset(OpARM64XOR) 19170 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19171 v0.AuxInt = int64ToAuxInt(1) 19172 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool) 19173 v1.AddArg2(x, y) 19174 v.AddArg2(v0, v1) 19175 return true 19176 } 19177 } 19178 func rewriteValueARM64_OpEqPtr(v *Value) bool { 19179 v_1 := v.Args[1] 19180 v_0 := 
v.Args[0] 19181 b := v.Block 19182 // match: (EqPtr x y) 19183 // result: (Equal (CMP x y)) 19184 for { 19185 x := v_0 19186 y := v_1 19187 v.reset(OpARM64Equal) 19188 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19189 v0.AddArg2(x, y) 19190 v.AddArg(v0) 19191 return true 19192 } 19193 } 19194 func rewriteValueARM64_OpFMA(v *Value) bool { 19195 v_2 := v.Args[2] 19196 v_1 := v.Args[1] 19197 v_0 := v.Args[0] 19198 // match: (FMA x y z) 19199 // result: (FMADDD z x y) 19200 for { 19201 x := v_0 19202 y := v_1 19203 z := v_2 19204 v.reset(OpARM64FMADDD) 19205 v.AddArg3(z, x, y) 19206 return true 19207 } 19208 } 19209 func rewriteValueARM64_OpHmul32(v *Value) bool { 19210 v_1 := v.Args[1] 19211 v_0 := v.Args[0] 19212 b := v.Block 19213 typ := &b.Func.Config.Types 19214 // match: (Hmul32 x y) 19215 // result: (SRAconst (MULL <typ.Int64> x y) [32]) 19216 for { 19217 x := v_0 19218 y := v_1 19219 v.reset(OpARM64SRAconst) 19220 v.AuxInt = int64ToAuxInt(32) 19221 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64) 19222 v0.AddArg2(x, y) 19223 v.AddArg(v0) 19224 return true 19225 } 19226 } 19227 func rewriteValueARM64_OpHmul32u(v *Value) bool { 19228 v_1 := v.Args[1] 19229 v_0 := v.Args[0] 19230 b := v.Block 19231 typ := &b.Func.Config.Types 19232 // match: (Hmul32u x y) 19233 // result: (SRAconst (UMULL <typ.UInt64> x y) [32]) 19234 for { 19235 x := v_0 19236 y := v_1 19237 v.reset(OpARM64SRAconst) 19238 v.AuxInt = int64ToAuxInt(32) 19239 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64) 19240 v0.AddArg2(x, y) 19241 v.AddArg(v0) 19242 return true 19243 } 19244 } 19245 func rewriteValueARM64_OpIsInBounds(v *Value) bool { 19246 v_1 := v.Args[1] 19247 v_0 := v.Args[0] 19248 b := v.Block 19249 // match: (IsInBounds idx len) 19250 // result: (LessThanU (CMP idx len)) 19251 for { 19252 idx := v_0 19253 len := v_1 19254 v.reset(OpARM64LessThanU) 19255 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19256 v0.AddArg2(idx, len) 19257 v.AddArg(v0) 19258 return true 19259 } 
19260 } 19261 func rewriteValueARM64_OpIsNonNil(v *Value) bool { 19262 v_0 := v.Args[0] 19263 b := v.Block 19264 // match: (IsNonNil ptr) 19265 // result: (NotEqual (CMPconst [0] ptr)) 19266 for { 19267 ptr := v_0 19268 v.reset(OpARM64NotEqual) 19269 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 19270 v0.AuxInt = int64ToAuxInt(0) 19271 v0.AddArg(ptr) 19272 v.AddArg(v0) 19273 return true 19274 } 19275 } 19276 func rewriteValueARM64_OpIsSliceInBounds(v *Value) bool { 19277 v_1 := v.Args[1] 19278 v_0 := v.Args[0] 19279 b := v.Block 19280 // match: (IsSliceInBounds idx len) 19281 // result: (LessEqualU (CMP idx len)) 19282 for { 19283 idx := v_0 19284 len := v_1 19285 v.reset(OpARM64LessEqualU) 19286 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19287 v0.AddArg2(idx, len) 19288 v.AddArg(v0) 19289 return true 19290 } 19291 } 19292 func rewriteValueARM64_OpLeq16(v *Value) bool { 19293 v_1 := v.Args[1] 19294 v_0 := v.Args[0] 19295 b := v.Block 19296 typ := &b.Func.Config.Types 19297 // match: (Leq16 x y) 19298 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 19299 for { 19300 x := v_0 19301 y := v_1 19302 v.reset(OpARM64LessEqual) 19303 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19304 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 19305 v1.AddArg(x) 19306 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 19307 v2.AddArg(y) 19308 v0.AddArg2(v1, v2) 19309 v.AddArg(v0) 19310 return true 19311 } 19312 } 19313 func rewriteValueARM64_OpLeq16U(v *Value) bool { 19314 v_1 := v.Args[1] 19315 v_0 := v.Args[0] 19316 b := v.Block 19317 typ := &b.Func.Config.Types 19318 // match: (Leq16U x zero:(MOVDconst [0])) 19319 // result: (Eq16 x zero) 19320 for { 19321 x := v_0 19322 zero := v_1 19323 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19324 break 19325 } 19326 v.reset(OpEq16) 19327 v.AddArg2(x, zero) 19328 return true 19329 } 19330 // match: (Leq16U (MOVDconst [1]) x) 19331 // result: (Neq16 (MOVDconst 
[0]) x) 19332 for { 19333 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 { 19334 break 19335 } 19336 x := v_1 19337 v.reset(OpNeq16) 19338 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19339 v0.AuxInt = int64ToAuxInt(0) 19340 v.AddArg2(v0, x) 19341 return true 19342 } 19343 // match: (Leq16U x y) 19344 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 19345 for { 19346 x := v_0 19347 y := v_1 19348 v.reset(OpARM64LessEqualU) 19349 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19350 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 19351 v1.AddArg(x) 19352 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 19353 v2.AddArg(y) 19354 v0.AddArg2(v1, v2) 19355 v.AddArg(v0) 19356 return true 19357 } 19358 } 19359 func rewriteValueARM64_OpLeq32(v *Value) bool { 19360 v_1 := v.Args[1] 19361 v_0 := v.Args[0] 19362 b := v.Block 19363 // match: (Leq32 x y) 19364 // result: (LessEqual (CMPW x y)) 19365 for { 19366 x := v_0 19367 y := v_1 19368 v.reset(OpARM64LessEqual) 19369 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19370 v0.AddArg2(x, y) 19371 v.AddArg(v0) 19372 return true 19373 } 19374 } 19375 func rewriteValueARM64_OpLeq32F(v *Value) bool { 19376 v_1 := v.Args[1] 19377 v_0 := v.Args[0] 19378 b := v.Block 19379 // match: (Leq32F x y) 19380 // result: (LessEqualF (FCMPS x y)) 19381 for { 19382 x := v_0 19383 y := v_1 19384 v.reset(OpARM64LessEqualF) 19385 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 19386 v0.AddArg2(x, y) 19387 v.AddArg(v0) 19388 return true 19389 } 19390 } 19391 func rewriteValueARM64_OpLeq32U(v *Value) bool { 19392 v_1 := v.Args[1] 19393 v_0 := v.Args[0] 19394 b := v.Block 19395 typ := &b.Func.Config.Types 19396 // match: (Leq32U x zero:(MOVDconst [0])) 19397 // result: (Eq32 x zero) 19398 for { 19399 x := v_0 19400 zero := v_1 19401 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19402 break 19403 } 19404 v.reset(OpEq32) 19405 v.AddArg2(x, zero) 19406 
return true 19407 } 19408 // match: (Leq32U (MOVDconst [1]) x) 19409 // result: (Neq32 (MOVDconst [0]) x) 19410 for { 19411 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 { 19412 break 19413 } 19414 x := v_1 19415 v.reset(OpNeq32) 19416 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19417 v0.AuxInt = int64ToAuxInt(0) 19418 v.AddArg2(v0, x) 19419 return true 19420 } 19421 // match: (Leq32U x y) 19422 // result: (LessEqualU (CMPW x y)) 19423 for { 19424 x := v_0 19425 y := v_1 19426 v.reset(OpARM64LessEqualU) 19427 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19428 v0.AddArg2(x, y) 19429 v.AddArg(v0) 19430 return true 19431 } 19432 } 19433 func rewriteValueARM64_OpLeq64(v *Value) bool { 19434 v_1 := v.Args[1] 19435 v_0 := v.Args[0] 19436 b := v.Block 19437 // match: (Leq64 x y) 19438 // result: (LessEqual (CMP x y)) 19439 for { 19440 x := v_0 19441 y := v_1 19442 v.reset(OpARM64LessEqual) 19443 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19444 v0.AddArg2(x, y) 19445 v.AddArg(v0) 19446 return true 19447 } 19448 } 19449 func rewriteValueARM64_OpLeq64F(v *Value) bool { 19450 v_1 := v.Args[1] 19451 v_0 := v.Args[0] 19452 b := v.Block 19453 // match: (Leq64F x y) 19454 // result: (LessEqualF (FCMPD x y)) 19455 for { 19456 x := v_0 19457 y := v_1 19458 v.reset(OpARM64LessEqualF) 19459 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 19460 v0.AddArg2(x, y) 19461 v.AddArg(v0) 19462 return true 19463 } 19464 } 19465 func rewriteValueARM64_OpLeq64U(v *Value) bool { 19466 v_1 := v.Args[1] 19467 v_0 := v.Args[0] 19468 b := v.Block 19469 typ := &b.Func.Config.Types 19470 // match: (Leq64U x zero:(MOVDconst [0])) 19471 // result: (Eq64 x zero) 19472 for { 19473 x := v_0 19474 zero := v_1 19475 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19476 break 19477 } 19478 v.reset(OpEq64) 19479 v.AddArg2(x, zero) 19480 return true 19481 } 19482 // match: (Leq64U (MOVDconst [1]) x) 19483 // result: (Neq64 (MOVDconst 
[0]) x) 19484 for { 19485 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 { 19486 break 19487 } 19488 x := v_1 19489 v.reset(OpNeq64) 19490 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19491 v0.AuxInt = int64ToAuxInt(0) 19492 v.AddArg2(v0, x) 19493 return true 19494 } 19495 // match: (Leq64U x y) 19496 // result: (LessEqualU (CMP x y)) 19497 for { 19498 x := v_0 19499 y := v_1 19500 v.reset(OpARM64LessEqualU) 19501 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19502 v0.AddArg2(x, y) 19503 v.AddArg(v0) 19504 return true 19505 } 19506 } 19507 func rewriteValueARM64_OpLeq8(v *Value) bool { 19508 v_1 := v.Args[1] 19509 v_0 := v.Args[0] 19510 b := v.Block 19511 typ := &b.Func.Config.Types 19512 // match: (Leq8 x y) 19513 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 19514 for { 19515 x := v_0 19516 y := v_1 19517 v.reset(OpARM64LessEqual) 19518 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19519 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 19520 v1.AddArg(x) 19521 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 19522 v2.AddArg(y) 19523 v0.AddArg2(v1, v2) 19524 v.AddArg(v0) 19525 return true 19526 } 19527 } 19528 func rewriteValueARM64_OpLeq8U(v *Value) bool { 19529 v_1 := v.Args[1] 19530 v_0 := v.Args[0] 19531 b := v.Block 19532 typ := &b.Func.Config.Types 19533 // match: (Leq8U x zero:(MOVDconst [0])) 19534 // result: (Eq8 x zero) 19535 for { 19536 x := v_0 19537 zero := v_1 19538 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19539 break 19540 } 19541 v.reset(OpEq8) 19542 v.AddArg2(x, zero) 19543 return true 19544 } 19545 // match: (Leq8U (MOVDconst [1]) x) 19546 // result: (Neq8 (MOVDconst [0]) x) 19547 for { 19548 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 { 19549 break 19550 } 19551 x := v_1 19552 v.reset(OpNeq8) 19553 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19554 v0.AuxInt = int64ToAuxInt(0) 19555 v.AddArg2(v0, x) 19556 return true 19557 
} 19558 // match: (Leq8U x y) 19559 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 19560 for { 19561 x := v_0 19562 y := v_1 19563 v.reset(OpARM64LessEqualU) 19564 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19565 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 19566 v1.AddArg(x) 19567 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 19568 v2.AddArg(y) 19569 v0.AddArg2(v1, v2) 19570 v.AddArg(v0) 19571 return true 19572 } 19573 } 19574 func rewriteValueARM64_OpLess16(v *Value) bool { 19575 v_1 := v.Args[1] 19576 v_0 := v.Args[0] 19577 b := v.Block 19578 typ := &b.Func.Config.Types 19579 // match: (Less16 x y) 19580 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 19581 for { 19582 x := v_0 19583 y := v_1 19584 v.reset(OpARM64LessThan) 19585 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19586 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 19587 v1.AddArg(x) 19588 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 19589 v2.AddArg(y) 19590 v0.AddArg2(v1, v2) 19591 v.AddArg(v0) 19592 return true 19593 } 19594 } 19595 func rewriteValueARM64_OpLess16U(v *Value) bool { 19596 v_1 := v.Args[1] 19597 v_0 := v.Args[0] 19598 b := v.Block 19599 typ := &b.Func.Config.Types 19600 // match: (Less16U zero:(MOVDconst [0]) x) 19601 // result: (Neq16 zero x) 19602 for { 19603 zero := v_0 19604 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19605 break 19606 } 19607 x := v_1 19608 v.reset(OpNeq16) 19609 v.AddArg2(zero, x) 19610 return true 19611 } 19612 // match: (Less16U x (MOVDconst [1])) 19613 // result: (Eq16 x (MOVDconst [0])) 19614 for { 19615 x := v_0 19616 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 19617 break 19618 } 19619 v.reset(OpEq16) 19620 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19621 v0.AuxInt = int64ToAuxInt(0) 19622 v.AddArg2(x, v0) 19623 return true 19624 } 19625 // match: (Less16U x y) 19626 // result: (LessThanU (CMPW (ZeroExt16to32 x) 
(ZeroExt16to32 y))) 19627 for { 19628 x := v_0 19629 y := v_1 19630 v.reset(OpARM64LessThanU) 19631 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19632 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 19633 v1.AddArg(x) 19634 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 19635 v2.AddArg(y) 19636 v0.AddArg2(v1, v2) 19637 v.AddArg(v0) 19638 return true 19639 } 19640 } 19641 func rewriteValueARM64_OpLess32(v *Value) bool { 19642 v_1 := v.Args[1] 19643 v_0 := v.Args[0] 19644 b := v.Block 19645 // match: (Less32 x y) 19646 // result: (LessThan (CMPW x y)) 19647 for { 19648 x := v_0 19649 y := v_1 19650 v.reset(OpARM64LessThan) 19651 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19652 v0.AddArg2(x, y) 19653 v.AddArg(v0) 19654 return true 19655 } 19656 } 19657 func rewriteValueARM64_OpLess32F(v *Value) bool { 19658 v_1 := v.Args[1] 19659 v_0 := v.Args[0] 19660 b := v.Block 19661 // match: (Less32F x y) 19662 // result: (LessThanF (FCMPS x y)) 19663 for { 19664 x := v_0 19665 y := v_1 19666 v.reset(OpARM64LessThanF) 19667 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 19668 v0.AddArg2(x, y) 19669 v.AddArg(v0) 19670 return true 19671 } 19672 } 19673 func rewriteValueARM64_OpLess32U(v *Value) bool { 19674 v_1 := v.Args[1] 19675 v_0 := v.Args[0] 19676 b := v.Block 19677 typ := &b.Func.Config.Types 19678 // match: (Less32U zero:(MOVDconst [0]) x) 19679 // result: (Neq32 zero x) 19680 for { 19681 zero := v_0 19682 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19683 break 19684 } 19685 x := v_1 19686 v.reset(OpNeq32) 19687 v.AddArg2(zero, x) 19688 return true 19689 } 19690 // match: (Less32U x (MOVDconst [1])) 19691 // result: (Eq32 x (MOVDconst [0])) 19692 for { 19693 x := v_0 19694 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 19695 break 19696 } 19697 v.reset(OpEq32) 19698 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19699 v0.AuxInt = int64ToAuxInt(0) 19700 v.AddArg2(x, v0) 19701 
return true 19702 } 19703 // match: (Less32U x y) 19704 // result: (LessThanU (CMPW x y)) 19705 for { 19706 x := v_0 19707 y := v_1 19708 v.reset(OpARM64LessThanU) 19709 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19710 v0.AddArg2(x, y) 19711 v.AddArg(v0) 19712 return true 19713 } 19714 } 19715 func rewriteValueARM64_OpLess64(v *Value) bool { 19716 v_1 := v.Args[1] 19717 v_0 := v.Args[0] 19718 b := v.Block 19719 // match: (Less64 x y) 19720 // result: (LessThan (CMP x y)) 19721 for { 19722 x := v_0 19723 y := v_1 19724 v.reset(OpARM64LessThan) 19725 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19726 v0.AddArg2(x, y) 19727 v.AddArg(v0) 19728 return true 19729 } 19730 } 19731 func rewriteValueARM64_OpLess64F(v *Value) bool { 19732 v_1 := v.Args[1] 19733 v_0 := v.Args[0] 19734 b := v.Block 19735 // match: (Less64F x y) 19736 // result: (LessThanF (FCMPD x y)) 19737 for { 19738 x := v_0 19739 y := v_1 19740 v.reset(OpARM64LessThanF) 19741 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 19742 v0.AddArg2(x, y) 19743 v.AddArg(v0) 19744 return true 19745 } 19746 } 19747 func rewriteValueARM64_OpLess64U(v *Value) bool { 19748 v_1 := v.Args[1] 19749 v_0 := v.Args[0] 19750 b := v.Block 19751 typ := &b.Func.Config.Types 19752 // match: (Less64U zero:(MOVDconst [0]) x) 19753 // result: (Neq64 zero x) 19754 for { 19755 zero := v_0 19756 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19757 break 19758 } 19759 x := v_1 19760 v.reset(OpNeq64) 19761 v.AddArg2(zero, x) 19762 return true 19763 } 19764 // match: (Less64U x (MOVDconst [1])) 19765 // result: (Eq64 x (MOVDconst [0])) 19766 for { 19767 x := v_0 19768 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 19769 break 19770 } 19771 v.reset(OpEq64) 19772 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19773 v0.AuxInt = int64ToAuxInt(0) 19774 v.AddArg2(x, v0) 19775 return true 19776 } 19777 // match: (Less64U x y) 19778 // result: (LessThanU (CMP x y)) 19779 
for { 19780 x := v_0 19781 y := v_1 19782 v.reset(OpARM64LessThanU) 19783 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 19784 v0.AddArg2(x, y) 19785 v.AddArg(v0) 19786 return true 19787 } 19788 } 19789 func rewriteValueARM64_OpLess8(v *Value) bool { 19790 v_1 := v.Args[1] 19791 v_0 := v.Args[0] 19792 b := v.Block 19793 typ := &b.Func.Config.Types 19794 // match: (Less8 x y) 19795 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 19796 for { 19797 x := v_0 19798 y := v_1 19799 v.reset(OpARM64LessThan) 19800 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19801 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 19802 v1.AddArg(x) 19803 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 19804 v2.AddArg(y) 19805 v0.AddArg2(v1, v2) 19806 v.AddArg(v0) 19807 return true 19808 } 19809 } 19810 func rewriteValueARM64_OpLess8U(v *Value) bool { 19811 v_1 := v.Args[1] 19812 v_0 := v.Args[0] 19813 b := v.Block 19814 typ := &b.Func.Config.Types 19815 // match: (Less8U zero:(MOVDconst [0]) x) 19816 // result: (Neq8 zero x) 19817 for { 19818 zero := v_0 19819 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { 19820 break 19821 } 19822 x := v_1 19823 v.reset(OpNeq8) 19824 v.AddArg2(zero, x) 19825 return true 19826 } 19827 // match: (Less8U x (MOVDconst [1])) 19828 // result: (Eq8 x (MOVDconst [0])) 19829 for { 19830 x := v_0 19831 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 { 19832 break 19833 } 19834 v.reset(OpEq8) 19835 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 19836 v0.AuxInt = int64ToAuxInt(0) 19837 v.AddArg2(x, v0) 19838 return true 19839 } 19840 // match: (Less8U x y) 19841 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 19842 for { 19843 x := v_0 19844 y := v_1 19845 v.reset(OpARM64LessThanU) 19846 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 19847 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 19848 v1.AddArg(x) 19849 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, 
typ.UInt32) 19850 v2.AddArg(y) 19851 v0.AddArg2(v1, v2) 19852 v.AddArg(v0) 19853 return true 19854 } 19855 } 19856 func rewriteValueARM64_OpLoad(v *Value) bool { 19857 v_1 := v.Args[1] 19858 v_0 := v.Args[0] 19859 // match: (Load <t> ptr mem) 19860 // cond: t.IsBoolean() 19861 // result: (MOVBUload ptr mem) 19862 for { 19863 t := v.Type 19864 ptr := v_0 19865 mem := v_1 19866 if !(t.IsBoolean()) { 19867 break 19868 } 19869 v.reset(OpARM64MOVBUload) 19870 v.AddArg2(ptr, mem) 19871 return true 19872 } 19873 // match: (Load <t> ptr mem) 19874 // cond: (is8BitInt(t) && t.IsSigned()) 19875 // result: (MOVBload ptr mem) 19876 for { 19877 t := v.Type 19878 ptr := v_0 19879 mem := v_1 19880 if !(is8BitInt(t) && t.IsSigned()) { 19881 break 19882 } 19883 v.reset(OpARM64MOVBload) 19884 v.AddArg2(ptr, mem) 19885 return true 19886 } 19887 // match: (Load <t> ptr mem) 19888 // cond: (is8BitInt(t) && !t.IsSigned()) 19889 // result: (MOVBUload ptr mem) 19890 for { 19891 t := v.Type 19892 ptr := v_0 19893 mem := v_1 19894 if !(is8BitInt(t) && !t.IsSigned()) { 19895 break 19896 } 19897 v.reset(OpARM64MOVBUload) 19898 v.AddArg2(ptr, mem) 19899 return true 19900 } 19901 // match: (Load <t> ptr mem) 19902 // cond: (is16BitInt(t) && t.IsSigned()) 19903 // result: (MOVHload ptr mem) 19904 for { 19905 t := v.Type 19906 ptr := v_0 19907 mem := v_1 19908 if !(is16BitInt(t) && t.IsSigned()) { 19909 break 19910 } 19911 v.reset(OpARM64MOVHload) 19912 v.AddArg2(ptr, mem) 19913 return true 19914 } 19915 // match: (Load <t> ptr mem) 19916 // cond: (is16BitInt(t) && !t.IsSigned()) 19917 // result: (MOVHUload ptr mem) 19918 for { 19919 t := v.Type 19920 ptr := v_0 19921 mem := v_1 19922 if !(is16BitInt(t) && !t.IsSigned()) { 19923 break 19924 } 19925 v.reset(OpARM64MOVHUload) 19926 v.AddArg2(ptr, mem) 19927 return true 19928 } 19929 // match: (Load <t> ptr mem) 19930 // cond: (is32BitInt(t) && t.IsSigned()) 19931 // result: (MOVWload ptr mem) 19932 for { 19933 t := v.Type 19934 ptr := v_0 19935 mem 
:= v_1 19936 if !(is32BitInt(t) && t.IsSigned()) { 19937 break 19938 } 19939 v.reset(OpARM64MOVWload) 19940 v.AddArg2(ptr, mem) 19941 return true 19942 } 19943 // match: (Load <t> ptr mem) 19944 // cond: (is32BitInt(t) && !t.IsSigned()) 19945 // result: (MOVWUload ptr mem) 19946 for { 19947 t := v.Type 19948 ptr := v_0 19949 mem := v_1 19950 if !(is32BitInt(t) && !t.IsSigned()) { 19951 break 19952 } 19953 v.reset(OpARM64MOVWUload) 19954 v.AddArg2(ptr, mem) 19955 return true 19956 } 19957 // match: (Load <t> ptr mem) 19958 // cond: (is64BitInt(t) || isPtr(t)) 19959 // result: (MOVDload ptr mem) 19960 for { 19961 t := v.Type 19962 ptr := v_0 19963 mem := v_1 19964 if !(is64BitInt(t) || isPtr(t)) { 19965 break 19966 } 19967 v.reset(OpARM64MOVDload) 19968 v.AddArg2(ptr, mem) 19969 return true 19970 } 19971 // match: (Load <t> ptr mem) 19972 // cond: is32BitFloat(t) 19973 // result: (FMOVSload ptr mem) 19974 for { 19975 t := v.Type 19976 ptr := v_0 19977 mem := v_1 19978 if !(is32BitFloat(t)) { 19979 break 19980 } 19981 v.reset(OpARM64FMOVSload) 19982 v.AddArg2(ptr, mem) 19983 return true 19984 } 19985 // match: (Load <t> ptr mem) 19986 // cond: is64BitFloat(t) 19987 // result: (FMOVDload ptr mem) 19988 for { 19989 t := v.Type 19990 ptr := v_0 19991 mem := v_1 19992 if !(is64BitFloat(t)) { 19993 break 19994 } 19995 v.reset(OpARM64FMOVDload) 19996 v.AddArg2(ptr, mem) 19997 return true 19998 } 19999 return false 20000 } 20001 func rewriteValueARM64_OpLocalAddr(v *Value) bool { 20002 v_1 := v.Args[1] 20003 v_0 := v.Args[0] 20004 b := v.Block 20005 typ := &b.Func.Config.Types 20006 // match: (LocalAddr <t> {sym} base mem) 20007 // cond: t.Elem().HasPointers() 20008 // result: (MOVDaddr {sym} (SPanchored base mem)) 20009 for { 20010 t := v.Type 20011 sym := auxToSym(v.Aux) 20012 base := v_0 20013 mem := v_1 20014 if !(t.Elem().HasPointers()) { 20015 break 20016 } 20017 v.reset(OpARM64MOVDaddr) 20018 v.Aux = symToAux(sym) 20019 v0 := b.NewValue0(v.Pos, OpSPanchored, 
typ.Uintptr) 20020 v0.AddArg2(base, mem) 20021 v.AddArg(v0) 20022 return true 20023 } 20024 // match: (LocalAddr <t> {sym} base _) 20025 // cond: !t.Elem().HasPointers() 20026 // result: (MOVDaddr {sym} base) 20027 for { 20028 t := v.Type 20029 sym := auxToSym(v.Aux) 20030 base := v_0 20031 if !(!t.Elem().HasPointers()) { 20032 break 20033 } 20034 v.reset(OpARM64MOVDaddr) 20035 v.Aux = symToAux(sym) 20036 v.AddArg(base) 20037 return true 20038 } 20039 return false 20040 } 20041 func rewriteValueARM64_OpLsh16x16(v *Value) bool { 20042 v_1 := v.Args[1] 20043 v_0 := v.Args[0] 20044 b := v.Block 20045 typ := &b.Func.Config.Types 20046 // match: (Lsh16x16 <t> x y) 20047 // cond: shiftIsBounded(v) 20048 // result: (SLL <t> x y) 20049 for { 20050 t := v.Type 20051 x := v_0 20052 y := v_1 20053 if !(shiftIsBounded(v)) { 20054 break 20055 } 20056 v.reset(OpARM64SLL) 20057 v.Type = t 20058 v.AddArg2(x, y) 20059 return true 20060 } 20061 // match: (Lsh16x16 <t> x y) 20062 // cond: !shiftIsBounded(v) 20063 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 20064 for { 20065 t := v.Type 20066 x := v_0 20067 y := v_1 20068 if !(!shiftIsBounded(v)) { 20069 break 20070 } 20071 v.reset(OpARM64CSEL) 20072 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20073 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20074 v0.AddArg2(x, y) 20075 v1 := b.NewValue0(v.Pos, OpConst64, t) 20076 v1.AuxInt = int64ToAuxInt(0) 20077 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20078 v2.AuxInt = int64ToAuxInt(64) 20079 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 20080 v3.AddArg(y) 20081 v2.AddArg(v3) 20082 v.AddArg3(v0, v1, v2) 20083 return true 20084 } 20085 return false 20086 } 20087 func rewriteValueARM64_OpLsh16x32(v *Value) bool { 20088 v_1 := v.Args[1] 20089 v_0 := v.Args[0] 20090 b := v.Block 20091 typ := &b.Func.Config.Types 20092 // match: (Lsh16x32 <t> x y) 20093 // cond: shiftIsBounded(v) 20094 // result: (SLL <t> x y) 20095 for { 
20096 t := v.Type 20097 x := v_0 20098 y := v_1 20099 if !(shiftIsBounded(v)) { 20100 break 20101 } 20102 v.reset(OpARM64SLL) 20103 v.Type = t 20104 v.AddArg2(x, y) 20105 return true 20106 } 20107 // match: (Lsh16x32 <t> x y) 20108 // cond: !shiftIsBounded(v) 20109 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 20110 for { 20111 t := v.Type 20112 x := v_0 20113 y := v_1 20114 if !(!shiftIsBounded(v)) { 20115 break 20116 } 20117 v.reset(OpARM64CSEL) 20118 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20119 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20120 v0.AddArg2(x, y) 20121 v1 := b.NewValue0(v.Pos, OpConst64, t) 20122 v1.AuxInt = int64ToAuxInt(0) 20123 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20124 v2.AuxInt = int64ToAuxInt(64) 20125 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 20126 v3.AddArg(y) 20127 v2.AddArg(v3) 20128 v.AddArg3(v0, v1, v2) 20129 return true 20130 } 20131 return false 20132 } 20133 func rewriteValueARM64_OpLsh16x64(v *Value) bool { 20134 v_1 := v.Args[1] 20135 v_0 := v.Args[0] 20136 b := v.Block 20137 // match: (Lsh16x64 <t> x y) 20138 // cond: shiftIsBounded(v) 20139 // result: (SLL <t> x y) 20140 for { 20141 t := v.Type 20142 x := v_0 20143 y := v_1 20144 if !(shiftIsBounded(v)) { 20145 break 20146 } 20147 v.reset(OpARM64SLL) 20148 v.Type = t 20149 v.AddArg2(x, y) 20150 return true 20151 } 20152 // match: (Lsh16x64 <t> x y) 20153 // cond: !shiftIsBounded(v) 20154 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 20155 for { 20156 t := v.Type 20157 x := v_0 20158 y := v_1 20159 if !(!shiftIsBounded(v)) { 20160 break 20161 } 20162 v.reset(OpARM64CSEL) 20163 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20164 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20165 v0.AddArg2(x, y) 20166 v1 := b.NewValue0(v.Pos, OpConst64, t) 20167 v1.AuxInt = int64ToAuxInt(0) 20168 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20169 v2.AuxInt = 
int64ToAuxInt(64) 20170 v2.AddArg(y) 20171 v.AddArg3(v0, v1, v2) 20172 return true 20173 } 20174 return false 20175 } 20176 func rewriteValueARM64_OpLsh16x8(v *Value) bool { 20177 v_1 := v.Args[1] 20178 v_0 := v.Args[0] 20179 b := v.Block 20180 typ := &b.Func.Config.Types 20181 // match: (Lsh16x8 <t> x y) 20182 // cond: shiftIsBounded(v) 20183 // result: (SLL <t> x y) 20184 for { 20185 t := v.Type 20186 x := v_0 20187 y := v_1 20188 if !(shiftIsBounded(v)) { 20189 break 20190 } 20191 v.reset(OpARM64SLL) 20192 v.Type = t 20193 v.AddArg2(x, y) 20194 return true 20195 } 20196 // match: (Lsh16x8 <t> x y) 20197 // cond: !shiftIsBounded(v) 20198 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 20199 for { 20200 t := v.Type 20201 x := v_0 20202 y := v_1 20203 if !(!shiftIsBounded(v)) { 20204 break 20205 } 20206 v.reset(OpARM64CSEL) 20207 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20208 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20209 v0.AddArg2(x, y) 20210 v1 := b.NewValue0(v.Pos, OpConst64, t) 20211 v1.AuxInt = int64ToAuxInt(0) 20212 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20213 v2.AuxInt = int64ToAuxInt(64) 20214 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 20215 v3.AddArg(y) 20216 v2.AddArg(v3) 20217 v.AddArg3(v0, v1, v2) 20218 return true 20219 } 20220 return false 20221 } 20222 func rewriteValueARM64_OpLsh32x16(v *Value) bool { 20223 v_1 := v.Args[1] 20224 v_0 := v.Args[0] 20225 b := v.Block 20226 typ := &b.Func.Config.Types 20227 // match: (Lsh32x16 <t> x y) 20228 // cond: shiftIsBounded(v) 20229 // result: (SLL <t> x y) 20230 for { 20231 t := v.Type 20232 x := v_0 20233 y := v_1 20234 if !(shiftIsBounded(v)) { 20235 break 20236 } 20237 v.reset(OpARM64SLL) 20238 v.Type = t 20239 v.AddArg2(x, y) 20240 return true 20241 } 20242 // match: (Lsh32x16 <t> x y) 20243 // cond: !shiftIsBounded(v) 20244 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 
y))) 20245 for { 20246 t := v.Type 20247 x := v_0 20248 y := v_1 20249 if !(!shiftIsBounded(v)) { 20250 break 20251 } 20252 v.reset(OpARM64CSEL) 20253 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20254 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20255 v0.AddArg2(x, y) 20256 v1 := b.NewValue0(v.Pos, OpConst64, t) 20257 v1.AuxInt = int64ToAuxInt(0) 20258 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20259 v2.AuxInt = int64ToAuxInt(64) 20260 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 20261 v3.AddArg(y) 20262 v2.AddArg(v3) 20263 v.AddArg3(v0, v1, v2) 20264 return true 20265 } 20266 return false 20267 } 20268 func rewriteValueARM64_OpLsh32x32(v *Value) bool { 20269 v_1 := v.Args[1] 20270 v_0 := v.Args[0] 20271 b := v.Block 20272 typ := &b.Func.Config.Types 20273 // match: (Lsh32x32 <t> x y) 20274 // cond: shiftIsBounded(v) 20275 // result: (SLL <t> x y) 20276 for { 20277 t := v.Type 20278 x := v_0 20279 y := v_1 20280 if !(shiftIsBounded(v)) { 20281 break 20282 } 20283 v.reset(OpARM64SLL) 20284 v.Type = t 20285 v.AddArg2(x, y) 20286 return true 20287 } 20288 // match: (Lsh32x32 <t> x y) 20289 // cond: !shiftIsBounded(v) 20290 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 20291 for { 20292 t := v.Type 20293 x := v_0 20294 y := v_1 20295 if !(!shiftIsBounded(v)) { 20296 break 20297 } 20298 v.reset(OpARM64CSEL) 20299 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20300 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20301 v0.AddArg2(x, y) 20302 v1 := b.NewValue0(v.Pos, OpConst64, t) 20303 v1.AuxInt = int64ToAuxInt(0) 20304 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20305 v2.AuxInt = int64ToAuxInt(64) 20306 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 20307 v3.AddArg(y) 20308 v2.AddArg(v3) 20309 v.AddArg3(v0, v1, v2) 20310 return true 20311 } 20312 return false 20313 } 20314 func rewriteValueARM64_OpLsh32x64(v *Value) bool { 20315 v_1 := v.Args[1] 20316 v_0 := v.Args[0] 20317 b := v.Block 
20318 // match: (Lsh32x64 <t> x y) 20319 // cond: shiftIsBounded(v) 20320 // result: (SLL <t> x y) 20321 for { 20322 t := v.Type 20323 x := v_0 20324 y := v_1 20325 if !(shiftIsBounded(v)) { 20326 break 20327 } 20328 v.reset(OpARM64SLL) 20329 v.Type = t 20330 v.AddArg2(x, y) 20331 return true 20332 } 20333 // match: (Lsh32x64 <t> x y) 20334 // cond: !shiftIsBounded(v) 20335 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 20336 for { 20337 t := v.Type 20338 x := v_0 20339 y := v_1 20340 if !(!shiftIsBounded(v)) { 20341 break 20342 } 20343 v.reset(OpARM64CSEL) 20344 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20345 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20346 v0.AddArg2(x, y) 20347 v1 := b.NewValue0(v.Pos, OpConst64, t) 20348 v1.AuxInt = int64ToAuxInt(0) 20349 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20350 v2.AuxInt = int64ToAuxInt(64) 20351 v2.AddArg(y) 20352 v.AddArg3(v0, v1, v2) 20353 return true 20354 } 20355 return false 20356 } 20357 func rewriteValueARM64_OpLsh32x8(v *Value) bool { 20358 v_1 := v.Args[1] 20359 v_0 := v.Args[0] 20360 b := v.Block 20361 typ := &b.Func.Config.Types 20362 // match: (Lsh32x8 <t> x y) 20363 // cond: shiftIsBounded(v) 20364 // result: (SLL <t> x y) 20365 for { 20366 t := v.Type 20367 x := v_0 20368 y := v_1 20369 if !(shiftIsBounded(v)) { 20370 break 20371 } 20372 v.reset(OpARM64SLL) 20373 v.Type = t 20374 v.AddArg2(x, y) 20375 return true 20376 } 20377 // match: (Lsh32x8 <t> x y) 20378 // cond: !shiftIsBounded(v) 20379 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 20380 for { 20381 t := v.Type 20382 x := v_0 20383 y := v_1 20384 if !(!shiftIsBounded(v)) { 20385 break 20386 } 20387 v.reset(OpARM64CSEL) 20388 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20389 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20390 v0.AddArg2(x, y) 20391 v1 := b.NewValue0(v.Pos, OpConst64, t) 20392 v1.AuxInt = int64ToAuxInt(0) 20393 v2 := b.NewValue0(v.Pos, 
OpARM64CMPconst, types.TypeFlags) 20394 v2.AuxInt = int64ToAuxInt(64) 20395 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 20396 v3.AddArg(y) 20397 v2.AddArg(v3) 20398 v.AddArg3(v0, v1, v2) 20399 return true 20400 } 20401 return false 20402 } 20403 func rewriteValueARM64_OpLsh64x16(v *Value) bool { 20404 v_1 := v.Args[1] 20405 v_0 := v.Args[0] 20406 b := v.Block 20407 typ := &b.Func.Config.Types 20408 // match: (Lsh64x16 <t> x y) 20409 // cond: shiftIsBounded(v) 20410 // result: (SLL <t> x y) 20411 for { 20412 t := v.Type 20413 x := v_0 20414 y := v_1 20415 if !(shiftIsBounded(v)) { 20416 break 20417 } 20418 v.reset(OpARM64SLL) 20419 v.Type = t 20420 v.AddArg2(x, y) 20421 return true 20422 } 20423 // match: (Lsh64x16 <t> x y) 20424 // cond: !shiftIsBounded(v) 20425 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 20426 for { 20427 t := v.Type 20428 x := v_0 20429 y := v_1 20430 if !(!shiftIsBounded(v)) { 20431 break 20432 } 20433 v.reset(OpARM64CSEL) 20434 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20435 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20436 v0.AddArg2(x, y) 20437 v1 := b.NewValue0(v.Pos, OpConst64, t) 20438 v1.AuxInt = int64ToAuxInt(0) 20439 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20440 v2.AuxInt = int64ToAuxInt(64) 20441 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 20442 v3.AddArg(y) 20443 v2.AddArg(v3) 20444 v.AddArg3(v0, v1, v2) 20445 return true 20446 } 20447 return false 20448 } 20449 func rewriteValueARM64_OpLsh64x32(v *Value) bool { 20450 v_1 := v.Args[1] 20451 v_0 := v.Args[0] 20452 b := v.Block 20453 typ := &b.Func.Config.Types 20454 // match: (Lsh64x32 <t> x y) 20455 // cond: shiftIsBounded(v) 20456 // result: (SLL <t> x y) 20457 for { 20458 t := v.Type 20459 x := v_0 20460 y := v_1 20461 if !(shiftIsBounded(v)) { 20462 break 20463 } 20464 v.reset(OpARM64SLL) 20465 v.Type = t 20466 v.AddArg2(x, y) 20467 return true 20468 } 20469 // match: (Lsh64x32 <t> x y) 20470 
// cond: !shiftIsBounded(v) 20471 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 20472 for { 20473 t := v.Type 20474 x := v_0 20475 y := v_1 20476 if !(!shiftIsBounded(v)) { 20477 break 20478 } 20479 v.reset(OpARM64CSEL) 20480 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20481 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20482 v0.AddArg2(x, y) 20483 v1 := b.NewValue0(v.Pos, OpConst64, t) 20484 v1.AuxInt = int64ToAuxInt(0) 20485 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20486 v2.AuxInt = int64ToAuxInt(64) 20487 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 20488 v3.AddArg(y) 20489 v2.AddArg(v3) 20490 v.AddArg3(v0, v1, v2) 20491 return true 20492 } 20493 return false 20494 } 20495 func rewriteValueARM64_OpLsh64x64(v *Value) bool { 20496 v_1 := v.Args[1] 20497 v_0 := v.Args[0] 20498 b := v.Block 20499 // match: (Lsh64x64 <t> x y) 20500 // cond: shiftIsBounded(v) 20501 // result: (SLL <t> x y) 20502 for { 20503 t := v.Type 20504 x := v_0 20505 y := v_1 20506 if !(shiftIsBounded(v)) { 20507 break 20508 } 20509 v.reset(OpARM64SLL) 20510 v.Type = t 20511 v.AddArg2(x, y) 20512 return true 20513 } 20514 // match: (Lsh64x64 <t> x y) 20515 // cond: !shiftIsBounded(v) 20516 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 20517 for { 20518 t := v.Type 20519 x := v_0 20520 y := v_1 20521 if !(!shiftIsBounded(v)) { 20522 break 20523 } 20524 v.reset(OpARM64CSEL) 20525 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20526 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20527 v0.AddArg2(x, y) 20528 v1 := b.NewValue0(v.Pos, OpConst64, t) 20529 v1.AuxInt = int64ToAuxInt(0) 20530 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20531 v2.AuxInt = int64ToAuxInt(64) 20532 v2.AddArg(y) 20533 v.AddArg3(v0, v1, v2) 20534 return true 20535 } 20536 return false 20537 } 20538 func rewriteValueARM64_OpLsh64x8(v *Value) bool { 20539 v_1 := v.Args[1] 20540 v_0 := v.Args[0] 20541 b := v.Block 
20542 typ := &b.Func.Config.Types 20543 // match: (Lsh64x8 <t> x y) 20544 // cond: shiftIsBounded(v) 20545 // result: (SLL <t> x y) 20546 for { 20547 t := v.Type 20548 x := v_0 20549 y := v_1 20550 if !(shiftIsBounded(v)) { 20551 break 20552 } 20553 v.reset(OpARM64SLL) 20554 v.Type = t 20555 v.AddArg2(x, y) 20556 return true 20557 } 20558 // match: (Lsh64x8 <t> x y) 20559 // cond: !shiftIsBounded(v) 20560 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 20561 for { 20562 t := v.Type 20563 x := v_0 20564 y := v_1 20565 if !(!shiftIsBounded(v)) { 20566 break 20567 } 20568 v.reset(OpARM64CSEL) 20569 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20570 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20571 v0.AddArg2(x, y) 20572 v1 := b.NewValue0(v.Pos, OpConst64, t) 20573 v1.AuxInt = int64ToAuxInt(0) 20574 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20575 v2.AuxInt = int64ToAuxInt(64) 20576 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 20577 v3.AddArg(y) 20578 v2.AddArg(v3) 20579 v.AddArg3(v0, v1, v2) 20580 return true 20581 } 20582 return false 20583 } 20584 func rewriteValueARM64_OpLsh8x16(v *Value) bool { 20585 v_1 := v.Args[1] 20586 v_0 := v.Args[0] 20587 b := v.Block 20588 typ := &b.Func.Config.Types 20589 // match: (Lsh8x16 <t> x y) 20590 // cond: shiftIsBounded(v) 20591 // result: (SLL <t> x y) 20592 for { 20593 t := v.Type 20594 x := v_0 20595 y := v_1 20596 if !(shiftIsBounded(v)) { 20597 break 20598 } 20599 v.reset(OpARM64SLL) 20600 v.Type = t 20601 v.AddArg2(x, y) 20602 return true 20603 } 20604 // match: (Lsh8x16 <t> x y) 20605 // cond: !shiftIsBounded(v) 20606 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 20607 for { 20608 t := v.Type 20609 x := v_0 20610 y := v_1 20611 if !(!shiftIsBounded(v)) { 20612 break 20613 } 20614 v.reset(OpARM64CSEL) 20615 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20616 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20617 
v0.AddArg2(x, y) 20618 v1 := b.NewValue0(v.Pos, OpConst64, t) 20619 v1.AuxInt = int64ToAuxInt(0) 20620 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20621 v2.AuxInt = int64ToAuxInt(64) 20622 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 20623 v3.AddArg(y) 20624 v2.AddArg(v3) 20625 v.AddArg3(v0, v1, v2) 20626 return true 20627 } 20628 return false 20629 } 20630 func rewriteValueARM64_OpLsh8x32(v *Value) bool { 20631 v_1 := v.Args[1] 20632 v_0 := v.Args[0] 20633 b := v.Block 20634 typ := &b.Func.Config.Types 20635 // match: (Lsh8x32 <t> x y) 20636 // cond: shiftIsBounded(v) 20637 // result: (SLL <t> x y) 20638 for { 20639 t := v.Type 20640 x := v_0 20641 y := v_1 20642 if !(shiftIsBounded(v)) { 20643 break 20644 } 20645 v.reset(OpARM64SLL) 20646 v.Type = t 20647 v.AddArg2(x, y) 20648 return true 20649 } 20650 // match: (Lsh8x32 <t> x y) 20651 // cond: !shiftIsBounded(v) 20652 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 20653 for { 20654 t := v.Type 20655 x := v_0 20656 y := v_1 20657 if !(!shiftIsBounded(v)) { 20658 break 20659 } 20660 v.reset(OpARM64CSEL) 20661 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20662 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20663 v0.AddArg2(x, y) 20664 v1 := b.NewValue0(v.Pos, OpConst64, t) 20665 v1.AuxInt = int64ToAuxInt(0) 20666 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20667 v2.AuxInt = int64ToAuxInt(64) 20668 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 20669 v3.AddArg(y) 20670 v2.AddArg(v3) 20671 v.AddArg3(v0, v1, v2) 20672 return true 20673 } 20674 return false 20675 } 20676 func rewriteValueARM64_OpLsh8x64(v *Value) bool { 20677 v_1 := v.Args[1] 20678 v_0 := v.Args[0] 20679 b := v.Block 20680 // match: (Lsh8x64 <t> x y) 20681 // cond: shiftIsBounded(v) 20682 // result: (SLL <t> x y) 20683 for { 20684 t := v.Type 20685 x := v_0 20686 y := v_1 20687 if !(shiftIsBounded(v)) { 20688 break 20689 } 20690 v.reset(OpARM64SLL) 20691 v.Type = 
t 20692 v.AddArg2(x, y) 20693 return true 20694 } 20695 // match: (Lsh8x64 <t> x y) 20696 // cond: !shiftIsBounded(v) 20697 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 20698 for { 20699 t := v.Type 20700 x := v_0 20701 y := v_1 20702 if !(!shiftIsBounded(v)) { 20703 break 20704 } 20705 v.reset(OpARM64CSEL) 20706 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20707 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20708 v0.AddArg2(x, y) 20709 v1 := b.NewValue0(v.Pos, OpConst64, t) 20710 v1.AuxInt = int64ToAuxInt(0) 20711 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20712 v2.AuxInt = int64ToAuxInt(64) 20713 v2.AddArg(y) 20714 v.AddArg3(v0, v1, v2) 20715 return true 20716 } 20717 return false 20718 } 20719 func rewriteValueARM64_OpLsh8x8(v *Value) bool { 20720 v_1 := v.Args[1] 20721 v_0 := v.Args[0] 20722 b := v.Block 20723 typ := &b.Func.Config.Types 20724 // match: (Lsh8x8 <t> x y) 20725 // cond: shiftIsBounded(v) 20726 // result: (SLL <t> x y) 20727 for { 20728 t := v.Type 20729 x := v_0 20730 y := v_1 20731 if !(shiftIsBounded(v)) { 20732 break 20733 } 20734 v.reset(OpARM64SLL) 20735 v.Type = t 20736 v.AddArg2(x, y) 20737 return true 20738 } 20739 // match: (Lsh8x8 <t> x y) 20740 // cond: !shiftIsBounded(v) 20741 // result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 20742 for { 20743 t := v.Type 20744 x := v_0 20745 y := v_1 20746 if !(!shiftIsBounded(v)) { 20747 break 20748 } 20749 v.reset(OpARM64CSEL) 20750 v.AuxInt = opToAuxInt(OpARM64LessThanU) 20751 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 20752 v0.AddArg2(x, y) 20753 v1 := b.NewValue0(v.Pos, OpConst64, t) 20754 v1.AuxInt = int64ToAuxInt(0) 20755 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 20756 v2.AuxInt = int64ToAuxInt(64) 20757 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 20758 v3.AddArg(y) 20759 v2.AddArg(v3) 20760 v.AddArg3(v0, v1, v2) 20761 return true 20762 } 20763 return false 20764 } 20765 
func rewriteValueARM64_OpMod16(v *Value) bool { 20766 v_1 := v.Args[1] 20767 v_0 := v.Args[0] 20768 b := v.Block 20769 typ := &b.Func.Config.Types 20770 // match: (Mod16 x y) 20771 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 20772 for { 20773 x := v_0 20774 y := v_1 20775 v.reset(OpARM64MODW) 20776 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 20777 v0.AddArg(x) 20778 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 20779 v1.AddArg(y) 20780 v.AddArg2(v0, v1) 20781 return true 20782 } 20783 } 20784 func rewriteValueARM64_OpMod16u(v *Value) bool { 20785 v_1 := v.Args[1] 20786 v_0 := v.Args[0] 20787 b := v.Block 20788 typ := &b.Func.Config.Types 20789 // match: (Mod16u x y) 20790 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 20791 for { 20792 x := v_0 20793 y := v_1 20794 v.reset(OpARM64UMODW) 20795 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 20796 v0.AddArg(x) 20797 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 20798 v1.AddArg(y) 20799 v.AddArg2(v0, v1) 20800 return true 20801 } 20802 } 20803 func rewriteValueARM64_OpMod32(v *Value) bool { 20804 v_1 := v.Args[1] 20805 v_0 := v.Args[0] 20806 // match: (Mod32 x y) 20807 // result: (MODW x y) 20808 for { 20809 x := v_0 20810 y := v_1 20811 v.reset(OpARM64MODW) 20812 v.AddArg2(x, y) 20813 return true 20814 } 20815 } 20816 func rewriteValueARM64_OpMod64(v *Value) bool { 20817 v_1 := v.Args[1] 20818 v_0 := v.Args[0] 20819 // match: (Mod64 x y) 20820 // result: (MOD x y) 20821 for { 20822 x := v_0 20823 y := v_1 20824 v.reset(OpARM64MOD) 20825 v.AddArg2(x, y) 20826 return true 20827 } 20828 } 20829 func rewriteValueARM64_OpMod8(v *Value) bool { 20830 v_1 := v.Args[1] 20831 v_0 := v.Args[0] 20832 b := v.Block 20833 typ := &b.Func.Config.Types 20834 // match: (Mod8 x y) 20835 // result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 20836 for { 20837 x := v_0 20838 y := v_1 20839 v.reset(OpARM64MODW) 20840 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 20841 v0.AddArg(x) 20842 v1 
:= b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 20843 v1.AddArg(y) 20844 v.AddArg2(v0, v1) 20845 return true 20846 } 20847 } 20848 func rewriteValueARM64_OpMod8u(v *Value) bool { 20849 v_1 := v.Args[1] 20850 v_0 := v.Args[0] 20851 b := v.Block 20852 typ := &b.Func.Config.Types 20853 // match: (Mod8u x y) 20854 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 20855 for { 20856 x := v_0 20857 y := v_1 20858 v.reset(OpARM64UMODW) 20859 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 20860 v0.AddArg(x) 20861 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 20862 v1.AddArg(y) 20863 v.AddArg2(v0, v1) 20864 return true 20865 } 20866 } 20867 func rewriteValueARM64_OpMove(v *Value) bool { 20868 v_2 := v.Args[2] 20869 v_1 := v.Args[1] 20870 v_0 := v.Args[0] 20871 b := v.Block 20872 config := b.Func.Config 20873 typ := &b.Func.Config.Types 20874 // match: (Move [0] _ _ mem) 20875 // result: mem 20876 for { 20877 if auxIntToInt64(v.AuxInt) != 0 { 20878 break 20879 } 20880 mem := v_2 20881 v.copyOf(mem) 20882 return true 20883 } 20884 // match: (Move [1] dst src mem) 20885 // result: (MOVBstore dst (MOVBUload src mem) mem) 20886 for { 20887 if auxIntToInt64(v.AuxInt) != 1 { 20888 break 20889 } 20890 dst := v_0 20891 src := v_1 20892 mem := v_2 20893 v.reset(OpARM64MOVBstore) 20894 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 20895 v0.AddArg2(src, mem) 20896 v.AddArg3(dst, v0, mem) 20897 return true 20898 } 20899 // match: (Move [2] dst src mem) 20900 // result: (MOVHstore dst (MOVHUload src mem) mem) 20901 for { 20902 if auxIntToInt64(v.AuxInt) != 2 { 20903 break 20904 } 20905 dst := v_0 20906 src := v_1 20907 mem := v_2 20908 v.reset(OpARM64MOVHstore) 20909 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 20910 v0.AddArg2(src, mem) 20911 v.AddArg3(dst, v0, mem) 20912 return true 20913 } 20914 // match: (Move [3] dst src mem) 20915 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 20916 for { 20917 if 
auxIntToInt64(v.AuxInt) != 3 { 20918 break 20919 } 20920 dst := v_0 20921 src := v_1 20922 mem := v_2 20923 v.reset(OpARM64MOVBstore) 20924 v.AuxInt = int32ToAuxInt(2) 20925 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 20926 v0.AuxInt = int32ToAuxInt(2) 20927 v0.AddArg2(src, mem) 20928 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 20929 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 20930 v2.AddArg2(src, mem) 20931 v1.AddArg3(dst, v2, mem) 20932 v.AddArg3(dst, v0, v1) 20933 return true 20934 } 20935 // match: (Move [4] dst src mem) 20936 // result: (MOVWstore dst (MOVWUload src mem) mem) 20937 for { 20938 if auxIntToInt64(v.AuxInt) != 4 { 20939 break 20940 } 20941 dst := v_0 20942 src := v_1 20943 mem := v_2 20944 v.reset(OpARM64MOVWstore) 20945 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 20946 v0.AddArg2(src, mem) 20947 v.AddArg3(dst, v0, mem) 20948 return true 20949 } 20950 // match: (Move [5] dst src mem) 20951 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 20952 for { 20953 if auxIntToInt64(v.AuxInt) != 5 { 20954 break 20955 } 20956 dst := v_0 20957 src := v_1 20958 mem := v_2 20959 v.reset(OpARM64MOVBstore) 20960 v.AuxInt = int32ToAuxInt(4) 20961 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 20962 v0.AuxInt = int32ToAuxInt(4) 20963 v0.AddArg2(src, mem) 20964 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 20965 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 20966 v2.AddArg2(src, mem) 20967 v1.AddArg3(dst, v2, mem) 20968 v.AddArg3(dst, v0, v1) 20969 return true 20970 } 20971 // match: (Move [6] dst src mem) 20972 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 20973 for { 20974 if auxIntToInt64(v.AuxInt) != 6 { 20975 break 20976 } 20977 dst := v_0 20978 src := v_1 20979 mem := v_2 20980 v.reset(OpARM64MOVHstore) 20981 v.AuxInt = int32ToAuxInt(4) 20982 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, 
typ.UInt16) 20983 v0.AuxInt = int32ToAuxInt(4) 20984 v0.AddArg2(src, mem) 20985 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 20986 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 20987 v2.AddArg2(src, mem) 20988 v1.AddArg3(dst, v2, mem) 20989 v.AddArg3(dst, v0, v1) 20990 return true 20991 } 20992 // match: (Move [7] dst src mem) 20993 // result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 20994 for { 20995 if auxIntToInt64(v.AuxInt) != 7 { 20996 break 20997 } 20998 dst := v_0 20999 src := v_1 21000 mem := v_2 21001 v.reset(OpARM64MOVWstore) 21002 v.AuxInt = int32ToAuxInt(3) 21003 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 21004 v0.AuxInt = int32ToAuxInt(3) 21005 v0.AddArg2(src, mem) 21006 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 21007 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 21008 v2.AddArg2(src, mem) 21009 v1.AddArg3(dst, v2, mem) 21010 v.AddArg3(dst, v0, v1) 21011 return true 21012 } 21013 // match: (Move [8] dst src mem) 21014 // result: (MOVDstore dst (MOVDload src mem) mem) 21015 for { 21016 if auxIntToInt64(v.AuxInt) != 8 { 21017 break 21018 } 21019 dst := v_0 21020 src := v_1 21021 mem := v_2 21022 v.reset(OpARM64MOVDstore) 21023 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21024 v0.AddArg2(src, mem) 21025 v.AddArg3(dst, v0, mem) 21026 return true 21027 } 21028 // match: (Move [9] dst src mem) 21029 // result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21030 for { 21031 if auxIntToInt64(v.AuxInt) != 9 { 21032 break 21033 } 21034 dst := v_0 21035 src := v_1 21036 mem := v_2 21037 v.reset(OpARM64MOVBstore) 21038 v.AuxInt = int32ToAuxInt(8) 21039 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 21040 v0.AuxInt = int32ToAuxInt(8) 21041 v0.AddArg2(src, mem) 21042 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21043 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21044 v2.AddArg2(src, mem) 
21045 v1.AddArg3(dst, v2, mem) 21046 v.AddArg3(dst, v0, v1) 21047 return true 21048 } 21049 // match: (Move [10] dst src mem) 21050 // result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21051 for { 21052 if auxIntToInt64(v.AuxInt) != 10 { 21053 break 21054 } 21055 dst := v_0 21056 src := v_1 21057 mem := v_2 21058 v.reset(OpARM64MOVHstore) 21059 v.AuxInt = int32ToAuxInt(8) 21060 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 21061 v0.AuxInt = int32ToAuxInt(8) 21062 v0.AddArg2(src, mem) 21063 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21064 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21065 v2.AddArg2(src, mem) 21066 v1.AddArg3(dst, v2, mem) 21067 v.AddArg3(dst, v0, v1) 21068 return true 21069 } 21070 // match: (Move [11] dst src mem) 21071 // result: (MOVDstore [3] dst (MOVDload [3] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21072 for { 21073 if auxIntToInt64(v.AuxInt) != 11 { 21074 break 21075 } 21076 dst := v_0 21077 src := v_1 21078 mem := v_2 21079 v.reset(OpARM64MOVDstore) 21080 v.AuxInt = int32ToAuxInt(3) 21081 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21082 v0.AuxInt = int32ToAuxInt(3) 21083 v0.AddArg2(src, mem) 21084 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21085 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21086 v2.AddArg2(src, mem) 21087 v1.AddArg3(dst, v2, mem) 21088 v.AddArg3(dst, v0, v1) 21089 return true 21090 } 21091 // match: (Move [12] dst src mem) 21092 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21093 for { 21094 if auxIntToInt64(v.AuxInt) != 12 { 21095 break 21096 } 21097 dst := v_0 21098 src := v_1 21099 mem := v_2 21100 v.reset(OpARM64MOVWstore) 21101 v.AuxInt = int32ToAuxInt(8) 21102 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 21103 v0.AuxInt = int32ToAuxInt(8) 21104 v0.AddArg2(src, mem) 21105 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21106 v2 := 
b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21107 v2.AddArg2(src, mem) 21108 v1.AddArg3(dst, v2, mem) 21109 v.AddArg3(dst, v0, v1) 21110 return true 21111 } 21112 // match: (Move [13] dst src mem) 21113 // result: (MOVDstore [5] dst (MOVDload [5] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21114 for { 21115 if auxIntToInt64(v.AuxInt) != 13 { 21116 break 21117 } 21118 dst := v_0 21119 src := v_1 21120 mem := v_2 21121 v.reset(OpARM64MOVDstore) 21122 v.AuxInt = int32ToAuxInt(5) 21123 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21124 v0.AuxInt = int32ToAuxInt(5) 21125 v0.AddArg2(src, mem) 21126 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21127 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21128 v2.AddArg2(src, mem) 21129 v1.AddArg3(dst, v2, mem) 21130 v.AddArg3(dst, v0, v1) 21131 return true 21132 } 21133 // match: (Move [14] dst src mem) 21134 // result: (MOVDstore [6] dst (MOVDload [6] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21135 for { 21136 if auxIntToInt64(v.AuxInt) != 14 { 21137 break 21138 } 21139 dst := v_0 21140 src := v_1 21141 mem := v_2 21142 v.reset(OpARM64MOVDstore) 21143 v.AuxInt = int32ToAuxInt(6) 21144 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21145 v0.AuxInt = int32ToAuxInt(6) 21146 v0.AddArg2(src, mem) 21147 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21148 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21149 v2.AddArg2(src, mem) 21150 v1.AddArg3(dst, v2, mem) 21151 v.AddArg3(dst, v0, v1) 21152 return true 21153 } 21154 // match: (Move [15] dst src mem) 21155 // result: (MOVDstore [7] dst (MOVDload [7] src mem) (MOVDstore dst (MOVDload src mem) mem)) 21156 for { 21157 if auxIntToInt64(v.AuxInt) != 15 { 21158 break 21159 } 21160 dst := v_0 21161 src := v_1 21162 mem := v_2 21163 v.reset(OpARM64MOVDstore) 21164 v.AuxInt = int32ToAuxInt(7) 21165 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21166 v0.AuxInt = int32ToAuxInt(7) 21167 v0.AddArg2(src, mem) 21168 v1 := 
b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 21169 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 21170 v2.AddArg2(src, mem) 21171 v1.AddArg3(dst, v2, mem) 21172 v.AddArg3(dst, v0, v1) 21173 return true 21174 } 21175 // match: (Move [16] dst src mem) 21176 // result: (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem) 21177 for { 21178 if auxIntToInt64(v.AuxInt) != 16 { 21179 break 21180 } 21181 dst := v_0 21182 src := v_1 21183 mem := v_2 21184 v.reset(OpARM64STP) 21185 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21186 v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21187 v1.AddArg2(src, mem) 21188 v0.AddArg(v1) 21189 v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21190 v2.AddArg(v1) 21191 v.AddArg4(dst, v0, v2, mem) 21192 return true 21193 } 21194 // match: (Move [32] dst src mem) 21195 // result: (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)) 21196 for { 21197 if auxIntToInt64(v.AuxInt) != 32 { 21198 break 21199 } 21200 dst := v_0 21201 src := v_1 21202 mem := v_2 21203 v.reset(OpARM64STP) 21204 v.AuxInt = int32ToAuxInt(16) 21205 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21206 v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21207 v1.AuxInt = int32ToAuxInt(16) 21208 v1.AddArg2(src, mem) 21209 v0.AddArg(v1) 21210 v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21211 v2.AddArg(v1) 21212 v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21213 v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21214 v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21215 v5.AddArg2(src, mem) 21216 v4.AddArg(v5) 21217 v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21218 v6.AddArg(v5) 21219 v3.AddArg4(dst, v4, v6, mem) 21220 v.AddArg4(dst, v0, v2, v3) 21221 return true 21222 } 21223 // match: (Move [48] 
dst src mem) 21224 // result: (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))) 21225 for { 21226 if auxIntToInt64(v.AuxInt) != 48 { 21227 break 21228 } 21229 dst := v_0 21230 src := v_1 21231 mem := v_2 21232 v.reset(OpARM64STP) 21233 v.AuxInt = int32ToAuxInt(32) 21234 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21235 v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21236 v1.AuxInt = int32ToAuxInt(32) 21237 v1.AddArg2(src, mem) 21238 v0.AddArg(v1) 21239 v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21240 v2.AddArg(v1) 21241 v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21242 v3.AuxInt = int32ToAuxInt(16) 21243 v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21244 v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21245 v5.AuxInt = int32ToAuxInt(16) 21246 v5.AddArg2(src, mem) 21247 v4.AddArg(v5) 21248 v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21249 v6.AddArg(v5) 21250 v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21251 v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21252 v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21253 v9.AddArg2(src, mem) 21254 v8.AddArg(v9) 21255 v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21256 v10.AddArg(v9) 21257 v7.AddArg4(dst, v8, v10, mem) 21258 v3.AddArg4(dst, v4, v6, v7) 21259 v.AddArg4(dst, v0, v2, v3) 21260 return true 21261 } 21262 // match: (Move [64] dst src mem) 21263 // result: (STP [48] dst (Select0 <typ.UInt64> (LDP [48] src mem)) (Select1 <typ.UInt64> (LDP [48] src mem)) (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 
<typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)))) 21264 for { 21265 if auxIntToInt64(v.AuxInt) != 64 { 21266 break 21267 } 21268 dst := v_0 21269 src := v_1 21270 mem := v_2 21271 v.reset(OpARM64STP) 21272 v.AuxInt = int32ToAuxInt(48) 21273 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21274 v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21275 v1.AuxInt = int32ToAuxInt(48) 21276 v1.AddArg2(src, mem) 21277 v0.AddArg(v1) 21278 v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21279 v2.AddArg(v1) 21280 v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21281 v3.AuxInt = int32ToAuxInt(32) 21282 v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21283 v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21284 v5.AuxInt = int32ToAuxInt(32) 21285 v5.AddArg2(src, mem) 21286 v4.AddArg(v5) 21287 v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21288 v6.AddArg(v5) 21289 v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21290 v7.AuxInt = int32ToAuxInt(16) 21291 v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21292 v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21293 v9.AuxInt = int32ToAuxInt(16) 21294 v9.AddArg2(src, mem) 21295 v8.AddArg(v9) 21296 v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21297 v10.AddArg(v9) 21298 v11 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 21299 v12 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 21300 v13 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64)) 21301 v13.AddArg2(src, mem) 21302 v12.AddArg(v13) 21303 v14 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 21304 v14.AddArg(v13) 21305 v11.AddArg4(dst, v12, v14, mem) 21306 v7.AddArg4(dst, v8, v10, v11) 21307 v3.AddArg4(dst, v4, v6, v7) 21308 v.AddArg4(dst, v0, v2, v3) 21309 return true 21310 } 21311 // match: (Move [s] dst src mem) 21312 // cond: s%16 != 0 && s%16 <= 8 && s > 16 21313 // result: (Move [8] (OffPtr <dst.Type> dst [s-8]) (OffPtr <src.Type> src [s-8]) 
(Move [s-s%16] dst src mem)) 21314 for { 21315 s := auxIntToInt64(v.AuxInt) 21316 dst := v_0 21317 src := v_1 21318 mem := v_2 21319 if !(s%16 != 0 && s%16 <= 8 && s > 16) { 21320 break 21321 } 21322 v.reset(OpMove) 21323 v.AuxInt = int64ToAuxInt(8) 21324 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 21325 v0.AuxInt = int64ToAuxInt(s - 8) 21326 v0.AddArg(dst) 21327 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 21328 v1.AuxInt = int64ToAuxInt(s - 8) 21329 v1.AddArg(src) 21330 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem) 21331 v2.AuxInt = int64ToAuxInt(s - s%16) 21332 v2.AddArg3(dst, src, mem) 21333 v.AddArg3(v0, v1, v2) 21334 return true 21335 } 21336 // match: (Move [s] dst src mem) 21337 // cond: s%16 != 0 && s%16 > 8 && s > 16 21338 // result: (Move [16] (OffPtr <dst.Type> dst [s-16]) (OffPtr <src.Type> src [s-16]) (Move [s-s%16] dst src mem)) 21339 for { 21340 s := auxIntToInt64(v.AuxInt) 21341 dst := v_0 21342 src := v_1 21343 mem := v_2 21344 if !(s%16 != 0 && s%16 > 8 && s > 16) { 21345 break 21346 } 21347 v.reset(OpMove) 21348 v.AuxInt = int64ToAuxInt(16) 21349 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 21350 v0.AuxInt = int64ToAuxInt(s - 16) 21351 v0.AddArg(dst) 21352 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 21353 v1.AuxInt = int64ToAuxInt(s - 16) 21354 v1.AddArg(src) 21355 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem) 21356 v2.AuxInt = int64ToAuxInt(s - s%16) 21357 v2.AddArg3(dst, src, mem) 21358 v.AddArg3(v0, v1, v2) 21359 return true 21360 } 21361 // match: (Move [s] dst src mem) 21362 // cond: s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s) 21363 // result: (DUFFCOPY [8 * (64 - s/16)] dst src mem) 21364 for { 21365 s := auxIntToInt64(v.AuxInt) 21366 dst := v_0 21367 src := v_1 21368 mem := v_2 21369 if !(s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) { 21370 break 21371 } 21372 v.reset(OpARM64DUFFCOPY) 21373 v.AuxInt = int64ToAuxInt(8 * (64 - s/16)) 21374 v.AddArg3(dst, src, mem) 
21375 return true 21376 } 21377 // match: (Move [s] dst src mem) 21378 // cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s) 21379 // result: (LoweredMove dst src (ADDconst <src.Type> src [s-16]) mem) 21380 for { 21381 s := auxIntToInt64(v.AuxInt) 21382 dst := v_0 21383 src := v_1 21384 mem := v_2 21385 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s)) { 21386 break 21387 } 21388 v.reset(OpARM64LoweredMove) 21389 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 21390 v0.AuxInt = int64ToAuxInt(s - 16) 21391 v0.AddArg(src) 21392 v.AddArg4(dst, src, v0, mem) 21393 return true 21394 } 21395 return false 21396 } 21397 func rewriteValueARM64_OpNeq16(v *Value) bool { 21398 v_1 := v.Args[1] 21399 v_0 := v.Args[0] 21400 b := v.Block 21401 typ := &b.Func.Config.Types 21402 // match: (Neq16 x y) 21403 // result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 21404 for { 21405 x := v_0 21406 y := v_1 21407 v.reset(OpARM64NotEqual) 21408 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 21409 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 21410 v1.AddArg(x) 21411 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 21412 v2.AddArg(y) 21413 v0.AddArg2(v1, v2) 21414 v.AddArg(v0) 21415 return true 21416 } 21417 } 21418 func rewriteValueARM64_OpNeq32(v *Value) bool { 21419 v_1 := v.Args[1] 21420 v_0 := v.Args[0] 21421 b := v.Block 21422 // match: (Neq32 x y) 21423 // result: (NotEqual (CMPW x y)) 21424 for { 21425 x := v_0 21426 y := v_1 21427 v.reset(OpARM64NotEqual) 21428 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 21429 v0.AddArg2(x, y) 21430 v.AddArg(v0) 21431 return true 21432 } 21433 } 21434 func rewriteValueARM64_OpNeq32F(v *Value) bool { 21435 v_1 := v.Args[1] 21436 v_0 := v.Args[0] 21437 b := v.Block 21438 // match: (Neq32F x y) 21439 // result: (NotEqual (FCMPS x y)) 21440 for { 21441 x := v_0 21442 y := v_1 21443 v.reset(OpARM64NotEqual) 21444 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, 
types.TypeFlags) 21445 v0.AddArg2(x, y) 21446 v.AddArg(v0) 21447 return true 21448 } 21449 } 21450 func rewriteValueARM64_OpNeq64(v *Value) bool { 21451 v_1 := v.Args[1] 21452 v_0 := v.Args[0] 21453 b := v.Block 21454 // match: (Neq64 x y) 21455 // result: (NotEqual (CMP x y)) 21456 for { 21457 x := v_0 21458 y := v_1 21459 v.reset(OpARM64NotEqual) 21460 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 21461 v0.AddArg2(x, y) 21462 v.AddArg(v0) 21463 return true 21464 } 21465 } 21466 func rewriteValueARM64_OpNeq64F(v *Value) bool { 21467 v_1 := v.Args[1] 21468 v_0 := v.Args[0] 21469 b := v.Block 21470 // match: (Neq64F x y) 21471 // result: (NotEqual (FCMPD x y)) 21472 for { 21473 x := v_0 21474 y := v_1 21475 v.reset(OpARM64NotEqual) 21476 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 21477 v0.AddArg2(x, y) 21478 v.AddArg(v0) 21479 return true 21480 } 21481 } 21482 func rewriteValueARM64_OpNeq8(v *Value) bool { 21483 v_1 := v.Args[1] 21484 v_0 := v.Args[0] 21485 b := v.Block 21486 typ := &b.Func.Config.Types 21487 // match: (Neq8 x y) 21488 // result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 21489 for { 21490 x := v_0 21491 y := v_1 21492 v.reset(OpARM64NotEqual) 21493 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 21494 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 21495 v1.AddArg(x) 21496 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 21497 v2.AddArg(y) 21498 v0.AddArg2(v1, v2) 21499 v.AddArg(v0) 21500 return true 21501 } 21502 } 21503 func rewriteValueARM64_OpNeqPtr(v *Value) bool { 21504 v_1 := v.Args[1] 21505 v_0 := v.Args[0] 21506 b := v.Block 21507 // match: (NeqPtr x y) 21508 // result: (NotEqual (CMP x y)) 21509 for { 21510 x := v_0 21511 y := v_1 21512 v.reset(OpARM64NotEqual) 21513 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 21514 v0.AddArg2(x, y) 21515 v.AddArg(v0) 21516 return true 21517 } 21518 } 21519 func rewriteValueARM64_OpNot(v *Value) bool { 21520 v_0 := v.Args[0] 21521 b := v.Block 21522 
typ := &b.Func.Config.Types 21523 // match: (Not x) 21524 // result: (XOR (MOVDconst [1]) x) 21525 for { 21526 x := v_0 21527 v.reset(OpARM64XOR) 21528 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 21529 v0.AuxInt = int64ToAuxInt(1) 21530 v.AddArg2(v0, x) 21531 return true 21532 } 21533 } 21534 func rewriteValueARM64_OpOffPtr(v *Value) bool { 21535 v_0 := v.Args[0] 21536 // match: (OffPtr [off] ptr:(SP)) 21537 // cond: is32Bit(off) 21538 // result: (MOVDaddr [int32(off)] ptr) 21539 for { 21540 off := auxIntToInt64(v.AuxInt) 21541 ptr := v_0 21542 if ptr.Op != OpSP || !(is32Bit(off)) { 21543 break 21544 } 21545 v.reset(OpARM64MOVDaddr) 21546 v.AuxInt = int32ToAuxInt(int32(off)) 21547 v.AddArg(ptr) 21548 return true 21549 } 21550 // match: (OffPtr [off] ptr) 21551 // result: (ADDconst [off] ptr) 21552 for { 21553 off := auxIntToInt64(v.AuxInt) 21554 ptr := v_0 21555 v.reset(OpARM64ADDconst) 21556 v.AuxInt = int64ToAuxInt(off) 21557 v.AddArg(ptr) 21558 return true 21559 } 21560 } 21561 func rewriteValueARM64_OpPanicBounds(v *Value) bool { 21562 v_2 := v.Args[2] 21563 v_1 := v.Args[1] 21564 v_0 := v.Args[0] 21565 // match: (PanicBounds [kind] x y mem) 21566 // cond: boundsABI(kind) == 0 21567 // result: (LoweredPanicBoundsA [kind] x y mem) 21568 for { 21569 kind := auxIntToInt64(v.AuxInt) 21570 x := v_0 21571 y := v_1 21572 mem := v_2 21573 if !(boundsABI(kind) == 0) { 21574 break 21575 } 21576 v.reset(OpARM64LoweredPanicBoundsA) 21577 v.AuxInt = int64ToAuxInt(kind) 21578 v.AddArg3(x, y, mem) 21579 return true 21580 } 21581 // match: (PanicBounds [kind] x y mem) 21582 // cond: boundsABI(kind) == 1 21583 // result: (LoweredPanicBoundsB [kind] x y mem) 21584 for { 21585 kind := auxIntToInt64(v.AuxInt) 21586 x := v_0 21587 y := v_1 21588 mem := v_2 21589 if !(boundsABI(kind) == 1) { 21590 break 21591 } 21592 v.reset(OpARM64LoweredPanicBoundsB) 21593 v.AuxInt = int64ToAuxInt(kind) 21594 v.AddArg3(x, y, mem) 21595 return true 21596 } 21597 // match: (PanicBounds 
[kind] x y mem) 21598 // cond: boundsABI(kind) == 2 21599 // result: (LoweredPanicBoundsC [kind] x y mem) 21600 for { 21601 kind := auxIntToInt64(v.AuxInt) 21602 x := v_0 21603 y := v_1 21604 mem := v_2 21605 if !(boundsABI(kind) == 2) { 21606 break 21607 } 21608 v.reset(OpARM64LoweredPanicBoundsC) 21609 v.AuxInt = int64ToAuxInt(kind) 21610 v.AddArg3(x, y, mem) 21611 return true 21612 } 21613 return false 21614 } 21615 func rewriteValueARM64_OpPopCount16(v *Value) bool { 21616 v_0 := v.Args[0] 21617 b := v.Block 21618 typ := &b.Func.Config.Types 21619 // match: (PopCount16 <t> x) 21620 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x))))) 21621 for { 21622 t := v.Type 21623 x := v_0 21624 v.reset(OpARM64FMOVDfpgp) 21625 v.Type = t 21626 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 21627 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 21628 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 21629 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21630 v3.AddArg(x) 21631 v2.AddArg(v3) 21632 v1.AddArg(v2) 21633 v0.AddArg(v1) 21634 v.AddArg(v0) 21635 return true 21636 } 21637 } 21638 func rewriteValueARM64_OpPopCount32(v *Value) bool { 21639 v_0 := v.Args[0] 21640 b := v.Block 21641 typ := &b.Func.Config.Types 21642 // match: (PopCount32 <t> x) 21643 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x))))) 21644 for { 21645 t := v.Type 21646 x := v_0 21647 v.reset(OpARM64FMOVDfpgp) 21648 v.Type = t 21649 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 21650 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 21651 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 21652 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 21653 v3.AddArg(x) 21654 v2.AddArg(v3) 21655 v1.AddArg(v2) 21656 v0.AddArg(v1) 21657 v.AddArg(v0) 21658 return true 21659 } 21660 } 21661 func rewriteValueARM64_OpPopCount64(v *Value) bool { 21662 v_0 
:= v.Args[0] 21663 b := v.Block 21664 typ := &b.Func.Config.Types 21665 // match: (PopCount64 <t> x) 21666 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x)))) 21667 for { 21668 t := v.Type 21669 x := v_0 21670 v.reset(OpARM64FMOVDfpgp) 21671 v.Type = t 21672 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 21673 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 21674 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 21675 v2.AddArg(x) 21676 v1.AddArg(v2) 21677 v0.AddArg(v1) 21678 v.AddArg(v0) 21679 return true 21680 } 21681 } 21682 func rewriteValueARM64_OpPrefetchCache(v *Value) bool { 21683 v_1 := v.Args[1] 21684 v_0 := v.Args[0] 21685 // match: (PrefetchCache addr mem) 21686 // result: (PRFM [0] addr mem) 21687 for { 21688 addr := v_0 21689 mem := v_1 21690 v.reset(OpARM64PRFM) 21691 v.AuxInt = int64ToAuxInt(0) 21692 v.AddArg2(addr, mem) 21693 return true 21694 } 21695 } 21696 func rewriteValueARM64_OpPrefetchCacheStreamed(v *Value) bool { 21697 v_1 := v.Args[1] 21698 v_0 := v.Args[0] 21699 // match: (PrefetchCacheStreamed addr mem) 21700 // result: (PRFM [1] addr mem) 21701 for { 21702 addr := v_0 21703 mem := v_1 21704 v.reset(OpARM64PRFM) 21705 v.AuxInt = int64ToAuxInt(1) 21706 v.AddArg2(addr, mem) 21707 return true 21708 } 21709 } 21710 func rewriteValueARM64_OpPubBarrier(v *Value) bool { 21711 v_0 := v.Args[0] 21712 // match: (PubBarrier mem) 21713 // result: (DMB [0xe] mem) 21714 for { 21715 mem := v_0 21716 v.reset(OpARM64DMB) 21717 v.AuxInt = int64ToAuxInt(0xe) 21718 v.AddArg(mem) 21719 return true 21720 } 21721 } 21722 func rewriteValueARM64_OpRotateLeft16(v *Value) bool { 21723 v_1 := v.Args[1] 21724 v_0 := v.Args[0] 21725 b := v.Block 21726 typ := &b.Func.Config.Types 21727 // match: (RotateLeft16 <t> x (MOVDconst [c])) 21728 // result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15]))) 21729 for { 21730 t := v.Type 21731 x := v_0 21732 if v_1.Op != 
OpARM64MOVDconst { 21733 break 21734 } 21735 c := auxIntToInt64(v_1.AuxInt) 21736 v.reset(OpOr16) 21737 v0 := b.NewValue0(v.Pos, OpLsh16x64, t) 21738 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 21739 v1.AuxInt = int64ToAuxInt(c & 15) 21740 v0.AddArg2(x, v1) 21741 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t) 21742 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 21743 v3.AuxInt = int64ToAuxInt(-c & 15) 21744 v2.AddArg2(x, v3) 21745 v.AddArg2(v0, v2) 21746 return true 21747 } 21748 // match: (RotateLeft16 <t> x y) 21749 // result: (RORW <t> (ORshiftLL <typ.UInt32> (ZeroExt16to32 x) (ZeroExt16to32 x) [16]) (NEG <typ.Int64> y)) 21750 for { 21751 t := v.Type 21752 x := v_0 21753 y := v_1 21754 v.reset(OpARM64RORW) 21755 v.Type = t 21756 v0 := b.NewValue0(v.Pos, OpARM64ORshiftLL, typ.UInt32) 21757 v0.AuxInt = int64ToAuxInt(16) 21758 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 21759 v1.AddArg(x) 21760 v0.AddArg2(v1, v1) 21761 v2 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64) 21762 v2.AddArg(y) 21763 v.AddArg2(v0, v2) 21764 return true 21765 } 21766 } 21767 func rewriteValueARM64_OpRotateLeft32(v *Value) bool { 21768 v_1 := v.Args[1] 21769 v_0 := v.Args[0] 21770 b := v.Block 21771 // match: (RotateLeft32 x y) 21772 // result: (RORW x (NEG <y.Type> y)) 21773 for { 21774 x := v_0 21775 y := v_1 21776 v.reset(OpARM64RORW) 21777 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 21778 v0.AddArg(y) 21779 v.AddArg2(x, v0) 21780 return true 21781 } 21782 } 21783 func rewriteValueARM64_OpRotateLeft64(v *Value) bool { 21784 v_1 := v.Args[1] 21785 v_0 := v.Args[0] 21786 b := v.Block 21787 // match: (RotateLeft64 x y) 21788 // result: (ROR x (NEG <y.Type> y)) 21789 for { 21790 x := v_0 21791 y := v_1 21792 v.reset(OpARM64ROR) 21793 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 21794 v0.AddArg(y) 21795 v.AddArg2(x, v0) 21796 return true 21797 } 21798 } 21799 func rewriteValueARM64_OpRotateLeft8(v *Value) bool { 21800 v_1 := v.Args[1] 21801 v_0 := v.Args[0] 21802 b := 
v.Block 21803 typ := &b.Func.Config.Types 21804 // match: (RotateLeft8 <t> x (MOVDconst [c])) 21805 // result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7]))) 21806 for { 21807 t := v.Type 21808 x := v_0 21809 if v_1.Op != OpARM64MOVDconst { 21810 break 21811 } 21812 c := auxIntToInt64(v_1.AuxInt) 21813 v.reset(OpOr8) 21814 v0 := b.NewValue0(v.Pos, OpLsh8x64, t) 21815 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 21816 v1.AuxInt = int64ToAuxInt(c & 7) 21817 v0.AddArg2(x, v1) 21818 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t) 21819 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 21820 v3.AuxInt = int64ToAuxInt(-c & 7) 21821 v2.AddArg2(x, v3) 21822 v.AddArg2(v0, v2) 21823 return true 21824 } 21825 // match: (RotateLeft8 <t> x y) 21826 // result: (OR <t> (SLL <t> x (ANDconst <typ.Int64> [7] y)) (SRL <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEG <typ.Int64> y)))) 21827 for { 21828 t := v.Type 21829 x := v_0 21830 y := v_1 21831 v.reset(OpARM64OR) 21832 v.Type = t 21833 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 21834 v1 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64) 21835 v1.AuxInt = int64ToAuxInt(7) 21836 v1.AddArg(y) 21837 v0.AddArg2(x, v1) 21838 v2 := b.NewValue0(v.Pos, OpARM64SRL, t) 21839 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 21840 v3.AddArg(x) 21841 v4 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64) 21842 v4.AuxInt = int64ToAuxInt(7) 21843 v5 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64) 21844 v5.AddArg(y) 21845 v4.AddArg(v5) 21846 v2.AddArg2(v3, v4) 21847 v.AddArg2(v0, v2) 21848 return true 21849 } 21850 } 21851 func rewriteValueARM64_OpRsh16Ux16(v *Value) bool { 21852 v_1 := v.Args[1] 21853 v_0 := v.Args[0] 21854 b := v.Block 21855 typ := &b.Func.Config.Types 21856 // match: (Rsh16Ux16 <t> x y) 21857 // cond: shiftIsBounded(v) 21858 // result: (SRL <t> (ZeroExt16to64 x) y) 21859 for { 21860 t := v.Type 21861 x := v_0 21862 y := v_1 21863 if !(shiftIsBounded(v)) { 21864 break 21865 } 21866 
v.reset(OpARM64SRL) 21867 v.Type = t 21868 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21869 v0.AddArg(x) 21870 v.AddArg2(v0, y) 21871 return true 21872 } 21873 // match: (Rsh16Ux16 <t> x y) 21874 // cond: !shiftIsBounded(v) 21875 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 21876 for { 21877 t := v.Type 21878 x := v_0 21879 y := v_1 21880 if !(!shiftIsBounded(v)) { 21881 break 21882 } 21883 v.reset(OpARM64CSEL) 21884 v.AuxInt = opToAuxInt(OpARM64LessThanU) 21885 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 21886 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21887 v1.AddArg(x) 21888 v0.AddArg2(v1, y) 21889 v2 := b.NewValue0(v.Pos, OpConst64, t) 21890 v2.AuxInt = int64ToAuxInt(0) 21891 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 21892 v3.AuxInt = int64ToAuxInt(64) 21893 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21894 v4.AddArg(y) 21895 v3.AddArg(v4) 21896 v.AddArg3(v0, v2, v3) 21897 return true 21898 } 21899 return false 21900 } 21901 func rewriteValueARM64_OpRsh16Ux32(v *Value) bool { 21902 v_1 := v.Args[1] 21903 v_0 := v.Args[0] 21904 b := v.Block 21905 typ := &b.Func.Config.Types 21906 // match: (Rsh16Ux32 <t> x y) 21907 // cond: shiftIsBounded(v) 21908 // result: (SRL <t> (ZeroExt16to64 x) y) 21909 for { 21910 t := v.Type 21911 x := v_0 21912 y := v_1 21913 if !(shiftIsBounded(v)) { 21914 break 21915 } 21916 v.reset(OpARM64SRL) 21917 v.Type = t 21918 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21919 v0.AddArg(x) 21920 v.AddArg2(v0, y) 21921 return true 21922 } 21923 // match: (Rsh16Ux32 <t> x y) 21924 // cond: !shiftIsBounded(v) 21925 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 21926 for { 21927 t := v.Type 21928 x := v_0 21929 y := v_1 21930 if !(!shiftIsBounded(v)) { 21931 break 21932 } 21933 v.reset(OpARM64CSEL) 21934 v.AuxInt = opToAuxInt(OpARM64LessThanU) 21935 
v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 21936 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21937 v1.AddArg(x) 21938 v0.AddArg2(v1, y) 21939 v2 := b.NewValue0(v.Pos, OpConst64, t) 21940 v2.AuxInt = int64ToAuxInt(0) 21941 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 21942 v3.AuxInt = int64ToAuxInt(64) 21943 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 21944 v4.AddArg(y) 21945 v3.AddArg(v4) 21946 v.AddArg3(v0, v2, v3) 21947 return true 21948 } 21949 return false 21950 } 21951 func rewriteValueARM64_OpRsh16Ux64(v *Value) bool { 21952 v_1 := v.Args[1] 21953 v_0 := v.Args[0] 21954 b := v.Block 21955 typ := &b.Func.Config.Types 21956 // match: (Rsh16Ux64 <t> x y) 21957 // cond: shiftIsBounded(v) 21958 // result: (SRL <t> (ZeroExt16to64 x) y) 21959 for { 21960 t := v.Type 21961 x := v_0 21962 y := v_1 21963 if !(shiftIsBounded(v)) { 21964 break 21965 } 21966 v.reset(OpARM64SRL) 21967 v.Type = t 21968 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21969 v0.AddArg(x) 21970 v.AddArg2(v0, y) 21971 return true 21972 } 21973 // match: (Rsh16Ux64 <t> x y) 21974 // cond: !shiftIsBounded(v) 21975 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 21976 for { 21977 t := v.Type 21978 x := v_0 21979 y := v_1 21980 if !(!shiftIsBounded(v)) { 21981 break 21982 } 21983 v.reset(OpARM64CSEL) 21984 v.AuxInt = opToAuxInt(OpARM64LessThanU) 21985 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 21986 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 21987 v1.AddArg(x) 21988 v0.AddArg2(v1, y) 21989 v2 := b.NewValue0(v.Pos, OpConst64, t) 21990 v2.AuxInt = int64ToAuxInt(0) 21991 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 21992 v3.AuxInt = int64ToAuxInt(64) 21993 v3.AddArg(y) 21994 v.AddArg3(v0, v2, v3) 21995 return true 21996 } 21997 return false 21998 } 21999 func rewriteValueARM64_OpRsh16Ux8(v *Value) bool { 22000 v_1 := v.Args[1] 22001 v_0 := v.Args[0] 22002 b := v.Block 22003 typ := 
&b.Func.Config.Types 22004 // match: (Rsh16Ux8 <t> x y) 22005 // cond: shiftIsBounded(v) 22006 // result: (SRL <t> (ZeroExt16to64 x) y) 22007 for { 22008 t := v.Type 22009 x := v_0 22010 y := v_1 22011 if !(shiftIsBounded(v)) { 22012 break 22013 } 22014 v.reset(OpARM64SRL) 22015 v.Type = t 22016 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22017 v0.AddArg(x) 22018 v.AddArg2(v0, y) 22019 return true 22020 } 22021 // match: (Rsh16Ux8 <t> x y) 22022 // cond: !shiftIsBounded(v) 22023 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 22024 for { 22025 t := v.Type 22026 x := v_0 22027 y := v_1 22028 if !(!shiftIsBounded(v)) { 22029 break 22030 } 22031 v.reset(OpARM64CSEL) 22032 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22033 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22034 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22035 v1.AddArg(x) 22036 v0.AddArg2(v1, y) 22037 v2 := b.NewValue0(v.Pos, OpConst64, t) 22038 v2.AuxInt = int64ToAuxInt(0) 22039 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22040 v3.AuxInt = int64ToAuxInt(64) 22041 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 22042 v4.AddArg(y) 22043 v3.AddArg(v4) 22044 v.AddArg3(v0, v2, v3) 22045 return true 22046 } 22047 return false 22048 } 22049 func rewriteValueARM64_OpRsh16x16(v *Value) bool { 22050 v_1 := v.Args[1] 22051 v_0 := v.Args[0] 22052 b := v.Block 22053 typ := &b.Func.Config.Types 22054 // match: (Rsh16x16 <t> x y) 22055 // cond: shiftIsBounded(v) 22056 // result: (SRA <t> (SignExt16to64 x) y) 22057 for { 22058 t := v.Type 22059 x := v_0 22060 y := v_1 22061 if !(shiftIsBounded(v)) { 22062 break 22063 } 22064 v.reset(OpARM64SRA) 22065 v.Type = t 22066 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22067 v0.AddArg(x) 22068 v.AddArg2(v0, y) 22069 return true 22070 } 22071 // match: (Rsh16x16 x y) 22072 // cond: !shiftIsBounded(v) 22073 // result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> 
y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 22074 for { 22075 x := v_0 22076 y := v_1 22077 if !(!shiftIsBounded(v)) { 22078 break 22079 } 22080 v.reset(OpARM64SRA) 22081 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22082 v0.AddArg(x) 22083 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22084 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22085 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22086 v2.AuxInt = int64ToAuxInt(63) 22087 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22088 v3.AuxInt = int64ToAuxInt(64) 22089 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22090 v4.AddArg(y) 22091 v3.AddArg(v4) 22092 v1.AddArg3(y, v2, v3) 22093 v.AddArg2(v0, v1) 22094 return true 22095 } 22096 return false 22097 } 22098 func rewriteValueARM64_OpRsh16x32(v *Value) bool { 22099 v_1 := v.Args[1] 22100 v_0 := v.Args[0] 22101 b := v.Block 22102 typ := &b.Func.Config.Types 22103 // match: (Rsh16x32 <t> x y) 22104 // cond: shiftIsBounded(v) 22105 // result: (SRA <t> (SignExt16to64 x) y) 22106 for { 22107 t := v.Type 22108 x := v_0 22109 y := v_1 22110 if !(shiftIsBounded(v)) { 22111 break 22112 } 22113 v.reset(OpARM64SRA) 22114 v.Type = t 22115 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22116 v0.AddArg(x) 22117 v.AddArg2(v0, y) 22118 return true 22119 } 22120 // match: (Rsh16x32 x y) 22121 // cond: !shiftIsBounded(v) 22122 // result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 22123 for { 22124 x := v_0 22125 y := v_1 22126 if !(!shiftIsBounded(v)) { 22127 break 22128 } 22129 v.reset(OpARM64SRA) 22130 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22131 v0.AddArg(x) 22132 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22133 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22134 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22135 v2.AuxInt = int64ToAuxInt(63) 22136 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22137 v3.AuxInt = int64ToAuxInt(64) 22138 v4 
:= b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22139 v4.AddArg(y) 22140 v3.AddArg(v4) 22141 v1.AddArg3(y, v2, v3) 22142 v.AddArg2(v0, v1) 22143 return true 22144 } 22145 return false 22146 } 22147 func rewriteValueARM64_OpRsh16x64(v *Value) bool { 22148 v_1 := v.Args[1] 22149 v_0 := v.Args[0] 22150 b := v.Block 22151 typ := &b.Func.Config.Types 22152 // match: (Rsh16x64 <t> x y) 22153 // cond: shiftIsBounded(v) 22154 // result: (SRA <t> (SignExt16to64 x) y) 22155 for { 22156 t := v.Type 22157 x := v_0 22158 y := v_1 22159 if !(shiftIsBounded(v)) { 22160 break 22161 } 22162 v.reset(OpARM64SRA) 22163 v.Type = t 22164 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22165 v0.AddArg(x) 22166 v.AddArg2(v0, y) 22167 return true 22168 } 22169 // match: (Rsh16x64 x y) 22170 // cond: !shiftIsBounded(v) 22171 // result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 22172 for { 22173 x := v_0 22174 y := v_1 22175 if !(!shiftIsBounded(v)) { 22176 break 22177 } 22178 v.reset(OpARM64SRA) 22179 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22180 v0.AddArg(x) 22181 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22182 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22183 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22184 v2.AuxInt = int64ToAuxInt(63) 22185 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22186 v3.AuxInt = int64ToAuxInt(64) 22187 v3.AddArg(y) 22188 v1.AddArg3(y, v2, v3) 22189 v.AddArg2(v0, v1) 22190 return true 22191 } 22192 return false 22193 } 22194 func rewriteValueARM64_OpRsh16x8(v *Value) bool { 22195 v_1 := v.Args[1] 22196 v_0 := v.Args[0] 22197 b := v.Block 22198 typ := &b.Func.Config.Types 22199 // match: (Rsh16x8 <t> x y) 22200 // cond: shiftIsBounded(v) 22201 // result: (SRA <t> (SignExt16to64 x) y) 22202 for { 22203 t := v.Type 22204 x := v_0 22205 y := v_1 22206 if !(shiftIsBounded(v)) { 22207 break 22208 } 22209 v.reset(OpARM64SRA) 22210 v.Type = t 22211 v0 := b.NewValue0(v.Pos, 
OpSignExt16to64, typ.Int64) 22212 v0.AddArg(x) 22213 v.AddArg2(v0, y) 22214 return true 22215 } 22216 // match: (Rsh16x8 x y) 22217 // cond: !shiftIsBounded(v) 22218 // result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 22219 for { 22220 x := v_0 22221 y := v_1 22222 if !(!shiftIsBounded(v)) { 22223 break 22224 } 22225 v.reset(OpARM64SRA) 22226 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 22227 v0.AddArg(x) 22228 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22229 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22230 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22231 v2.AuxInt = int64ToAuxInt(63) 22232 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22233 v3.AuxInt = int64ToAuxInt(64) 22234 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 22235 v4.AddArg(y) 22236 v3.AddArg(v4) 22237 v1.AddArg3(y, v2, v3) 22238 v.AddArg2(v0, v1) 22239 return true 22240 } 22241 return false 22242 } 22243 func rewriteValueARM64_OpRsh32Ux16(v *Value) bool { 22244 v_1 := v.Args[1] 22245 v_0 := v.Args[0] 22246 b := v.Block 22247 typ := &b.Func.Config.Types 22248 // match: (Rsh32Ux16 <t> x y) 22249 // cond: shiftIsBounded(v) 22250 // result: (SRL <t> (ZeroExt32to64 x) y) 22251 for { 22252 t := v.Type 22253 x := v_0 22254 y := v_1 22255 if !(shiftIsBounded(v)) { 22256 break 22257 } 22258 v.reset(OpARM64SRL) 22259 v.Type = t 22260 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22261 v0.AddArg(x) 22262 v.AddArg2(v0, y) 22263 return true 22264 } 22265 // match: (Rsh32Ux16 <t> x y) 22266 // cond: !shiftIsBounded(v) 22267 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 22268 for { 22269 t := v.Type 22270 x := v_0 22271 y := v_1 22272 if !(!shiftIsBounded(v)) { 22273 break 22274 } 22275 v.reset(OpARM64CSEL) 22276 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22277 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22278 v1 := b.NewValue0(v.Pos, 
OpZeroExt32to64, typ.UInt64) 22279 v1.AddArg(x) 22280 v0.AddArg2(v1, y) 22281 v2 := b.NewValue0(v.Pos, OpConst64, t) 22282 v2.AuxInt = int64ToAuxInt(0) 22283 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22284 v3.AuxInt = int64ToAuxInt(64) 22285 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22286 v4.AddArg(y) 22287 v3.AddArg(v4) 22288 v.AddArg3(v0, v2, v3) 22289 return true 22290 } 22291 return false 22292 } 22293 func rewriteValueARM64_OpRsh32Ux32(v *Value) bool { 22294 v_1 := v.Args[1] 22295 v_0 := v.Args[0] 22296 b := v.Block 22297 typ := &b.Func.Config.Types 22298 // match: (Rsh32Ux32 <t> x y) 22299 // cond: shiftIsBounded(v) 22300 // result: (SRL <t> (ZeroExt32to64 x) y) 22301 for { 22302 t := v.Type 22303 x := v_0 22304 y := v_1 22305 if !(shiftIsBounded(v)) { 22306 break 22307 } 22308 v.reset(OpARM64SRL) 22309 v.Type = t 22310 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22311 v0.AddArg(x) 22312 v.AddArg2(v0, y) 22313 return true 22314 } 22315 // match: (Rsh32Ux32 <t> x y) 22316 // cond: !shiftIsBounded(v) 22317 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 22318 for { 22319 t := v.Type 22320 x := v_0 22321 y := v_1 22322 if !(!shiftIsBounded(v)) { 22323 break 22324 } 22325 v.reset(OpARM64CSEL) 22326 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22327 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22328 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22329 v1.AddArg(x) 22330 v0.AddArg2(v1, y) 22331 v2 := b.NewValue0(v.Pos, OpConst64, t) 22332 v2.AuxInt = int64ToAuxInt(0) 22333 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22334 v3.AuxInt = int64ToAuxInt(64) 22335 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22336 v4.AddArg(y) 22337 v3.AddArg(v4) 22338 v.AddArg3(v0, v2, v3) 22339 return true 22340 } 22341 return false 22342 } 22343 func rewriteValueARM64_OpRsh32Ux64(v *Value) bool { 22344 v_1 := v.Args[1] 22345 v_0 := v.Args[0] 22346 b := 
v.Block 22347 typ := &b.Func.Config.Types 22348 // match: (Rsh32Ux64 <t> x y) 22349 // cond: shiftIsBounded(v) 22350 // result: (SRL <t> (ZeroExt32to64 x) y) 22351 for { 22352 t := v.Type 22353 x := v_0 22354 y := v_1 22355 if !(shiftIsBounded(v)) { 22356 break 22357 } 22358 v.reset(OpARM64SRL) 22359 v.Type = t 22360 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22361 v0.AddArg(x) 22362 v.AddArg2(v0, y) 22363 return true 22364 } 22365 // match: (Rsh32Ux64 <t> x y) 22366 // cond: !shiftIsBounded(v) 22367 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 22368 for { 22369 t := v.Type 22370 x := v_0 22371 y := v_1 22372 if !(!shiftIsBounded(v)) { 22373 break 22374 } 22375 v.reset(OpARM64CSEL) 22376 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22377 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22378 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22379 v1.AddArg(x) 22380 v0.AddArg2(v1, y) 22381 v2 := b.NewValue0(v.Pos, OpConst64, t) 22382 v2.AuxInt = int64ToAuxInt(0) 22383 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22384 v3.AuxInt = int64ToAuxInt(64) 22385 v3.AddArg(y) 22386 v.AddArg3(v0, v2, v3) 22387 return true 22388 } 22389 return false 22390 } 22391 func rewriteValueARM64_OpRsh32Ux8(v *Value) bool { 22392 v_1 := v.Args[1] 22393 v_0 := v.Args[0] 22394 b := v.Block 22395 typ := &b.Func.Config.Types 22396 // match: (Rsh32Ux8 <t> x y) 22397 // cond: shiftIsBounded(v) 22398 // result: (SRL <t> (ZeroExt32to64 x) y) 22399 for { 22400 t := v.Type 22401 x := v_0 22402 y := v_1 22403 if !(shiftIsBounded(v)) { 22404 break 22405 } 22406 v.reset(OpARM64SRL) 22407 v.Type = t 22408 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22409 v0.AddArg(x) 22410 v.AddArg2(v0, y) 22411 return true 22412 } 22413 // match: (Rsh32Ux8 <t> x y) 22414 // cond: !shiftIsBounded(v) 22415 // result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 22416 for { 
22417 t := v.Type 22418 x := v_0 22419 y := v_1 22420 if !(!shiftIsBounded(v)) { 22421 break 22422 } 22423 v.reset(OpARM64CSEL) 22424 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22425 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22426 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22427 v1.AddArg(x) 22428 v0.AddArg2(v1, y) 22429 v2 := b.NewValue0(v.Pos, OpConst64, t) 22430 v2.AuxInt = int64ToAuxInt(0) 22431 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22432 v3.AuxInt = int64ToAuxInt(64) 22433 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 22434 v4.AddArg(y) 22435 v3.AddArg(v4) 22436 v.AddArg3(v0, v2, v3) 22437 return true 22438 } 22439 return false 22440 } 22441 func rewriteValueARM64_OpRsh32x16(v *Value) bool { 22442 v_1 := v.Args[1] 22443 v_0 := v.Args[0] 22444 b := v.Block 22445 typ := &b.Func.Config.Types 22446 // match: (Rsh32x16 <t> x y) 22447 // cond: shiftIsBounded(v) 22448 // result: (SRA <t> (SignExt32to64 x) y) 22449 for { 22450 t := v.Type 22451 x := v_0 22452 y := v_1 22453 if !(shiftIsBounded(v)) { 22454 break 22455 } 22456 v.reset(OpARM64SRA) 22457 v.Type = t 22458 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22459 v0.AddArg(x) 22460 v.AddArg2(v0, y) 22461 return true 22462 } 22463 // match: (Rsh32x16 x y) 22464 // cond: !shiftIsBounded(v) 22465 // result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 22466 for { 22467 x := v_0 22468 y := v_1 22469 if !(!shiftIsBounded(v)) { 22470 break 22471 } 22472 v.reset(OpARM64SRA) 22473 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22474 v0.AddArg(x) 22475 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22476 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22477 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22478 v2.AuxInt = int64ToAuxInt(63) 22479 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22480 v3.AuxInt = int64ToAuxInt(64) 22481 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22482 
v4.AddArg(y) 22483 v3.AddArg(v4) 22484 v1.AddArg3(y, v2, v3) 22485 v.AddArg2(v0, v1) 22486 return true 22487 } 22488 return false 22489 } 22490 func rewriteValueARM64_OpRsh32x32(v *Value) bool { 22491 v_1 := v.Args[1] 22492 v_0 := v.Args[0] 22493 b := v.Block 22494 typ := &b.Func.Config.Types 22495 // match: (Rsh32x32 <t> x y) 22496 // cond: shiftIsBounded(v) 22497 // result: (SRA <t> (SignExt32to64 x) y) 22498 for { 22499 t := v.Type 22500 x := v_0 22501 y := v_1 22502 if !(shiftIsBounded(v)) { 22503 break 22504 } 22505 v.reset(OpARM64SRA) 22506 v.Type = t 22507 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22508 v0.AddArg(x) 22509 v.AddArg2(v0, y) 22510 return true 22511 } 22512 // match: (Rsh32x32 x y) 22513 // cond: !shiftIsBounded(v) 22514 // result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 22515 for { 22516 x := v_0 22517 y := v_1 22518 if !(!shiftIsBounded(v)) { 22519 break 22520 } 22521 v.reset(OpARM64SRA) 22522 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22523 v0.AddArg(x) 22524 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22525 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22526 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22527 v2.AuxInt = int64ToAuxInt(63) 22528 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22529 v3.AuxInt = int64ToAuxInt(64) 22530 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 22531 v4.AddArg(y) 22532 v3.AddArg(v4) 22533 v1.AddArg3(y, v2, v3) 22534 v.AddArg2(v0, v1) 22535 return true 22536 } 22537 return false 22538 } 22539 func rewriteValueARM64_OpRsh32x64(v *Value) bool { 22540 v_1 := v.Args[1] 22541 v_0 := v.Args[0] 22542 b := v.Block 22543 typ := &b.Func.Config.Types 22544 // match: (Rsh32x64 <t> x y) 22545 // cond: shiftIsBounded(v) 22546 // result: (SRA <t> (SignExt32to64 x) y) 22547 for { 22548 t := v.Type 22549 x := v_0 22550 y := v_1 22551 if !(shiftIsBounded(v)) { 22552 break 22553 } 22554 v.reset(OpARM64SRA) 22555 
v.Type = t 22556 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22557 v0.AddArg(x) 22558 v.AddArg2(v0, y) 22559 return true 22560 } 22561 // match: (Rsh32x64 x y) 22562 // cond: !shiftIsBounded(v) 22563 // result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 22564 for { 22565 x := v_0 22566 y := v_1 22567 if !(!shiftIsBounded(v)) { 22568 break 22569 } 22570 v.reset(OpARM64SRA) 22571 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22572 v0.AddArg(x) 22573 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22574 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22575 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 22576 v2.AuxInt = int64ToAuxInt(63) 22577 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22578 v3.AuxInt = int64ToAuxInt(64) 22579 v3.AddArg(y) 22580 v1.AddArg3(y, v2, v3) 22581 v.AddArg2(v0, v1) 22582 return true 22583 } 22584 return false 22585 } 22586 func rewriteValueARM64_OpRsh32x8(v *Value) bool { 22587 v_1 := v.Args[1] 22588 v_0 := v.Args[0] 22589 b := v.Block 22590 typ := &b.Func.Config.Types 22591 // match: (Rsh32x8 <t> x y) 22592 // cond: shiftIsBounded(v) 22593 // result: (SRA <t> (SignExt32to64 x) y) 22594 for { 22595 t := v.Type 22596 x := v_0 22597 y := v_1 22598 if !(shiftIsBounded(v)) { 22599 break 22600 } 22601 v.reset(OpARM64SRA) 22602 v.Type = t 22603 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22604 v0.AddArg(x) 22605 v.AddArg2(v0, y) 22606 return true 22607 } 22608 // match: (Rsh32x8 x y) 22609 // cond: !shiftIsBounded(v) 22610 // result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 22611 for { 22612 x := v_0 22613 y := v_1 22614 if !(!shiftIsBounded(v)) { 22615 break 22616 } 22617 v.reset(OpARM64SRA) 22618 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 22619 v0.AddArg(x) 22620 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 22621 v1.AuxInt = opToAuxInt(OpARM64LessThanU) 22622 v2 := 
b.NewValue0(v.Pos, OpConst64, y.Type) 22623 v2.AuxInt = int64ToAuxInt(63) 22624 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22625 v3.AuxInt = int64ToAuxInt(64) 22626 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 22627 v4.AddArg(y) 22628 v3.AddArg(v4) 22629 v1.AddArg3(y, v2, v3) 22630 v.AddArg2(v0, v1) 22631 return true 22632 } 22633 return false 22634 } 22635 func rewriteValueARM64_OpRsh64Ux16(v *Value) bool { 22636 v_1 := v.Args[1] 22637 v_0 := v.Args[0] 22638 b := v.Block 22639 typ := &b.Func.Config.Types 22640 // match: (Rsh64Ux16 <t> x y) 22641 // cond: shiftIsBounded(v) 22642 // result: (SRL <t> x y) 22643 for { 22644 t := v.Type 22645 x := v_0 22646 y := v_1 22647 if !(shiftIsBounded(v)) { 22648 break 22649 } 22650 v.reset(OpARM64SRL) 22651 v.Type = t 22652 v.AddArg2(x, y) 22653 return true 22654 } 22655 // match: (Rsh64Ux16 <t> x y) 22656 // cond: !shiftIsBounded(v) 22657 // result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 22658 for { 22659 t := v.Type 22660 x := v_0 22661 y := v_1 22662 if !(!shiftIsBounded(v)) { 22663 break 22664 } 22665 v.reset(OpARM64CSEL) 22666 v.AuxInt = opToAuxInt(OpARM64LessThanU) 22667 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 22668 v0.AddArg2(x, y) 22669 v1 := b.NewValue0(v.Pos, OpConst64, t) 22670 v1.AuxInt = int64ToAuxInt(0) 22671 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 22672 v2.AuxInt = int64ToAuxInt(64) 22673 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 22674 v3.AddArg(y) 22675 v2.AddArg(v3) 22676 v.AddArg3(v0, v1, v2) 22677 return true 22678 } 22679 return false 22680 } 22681 func rewriteValueARM64_OpRsh64Ux32(v *Value) bool { 22682 v_1 := v.Args[1] 22683 v_0 := v.Args[0] 22684 b := v.Block 22685 typ := &b.Func.Config.Types 22686 // match: (Rsh64Ux32 <t> x y) 22687 // cond: shiftIsBounded(v) 22688 // result: (SRL <t> x y) 22689 for { 22690 t := v.Type 22691 x := v_0 22692 y := v_1 22693 if !(shiftIsBounded(v)) { 22694 break 22695 
		// NOTE(review): continuation of rewriteValueARM64_OpRsh64Ux32, whose header
		// lies above this view. This whole region is generated from _gen/ARM64.rules
		// ('go generate'; DO NOT EDIT by hand) — comments only are added here.
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64Ux64 lowers Rsh64Ux64 (64-bit unsigned shift right by a
// 64-bit count) to SRL; when the count is not provably < 64, a CSEL selects 0 instead.
func rewriteValueARM64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64Ux8 lowers Rsh64Ux8; the 8-bit count is zero-extended to
// 64 bits for the out-of-range (>= 64) comparison in the unbounded case.
func rewriteValueARM64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64x16 lowers Rsh64x16 (signed shift right). For an unbounded
// count the CSEL clamps the shift amount itself to 63 rather than selecting a result.
func rewriteValueARM64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64x32 lowers Rsh64x32; same clamp-to-63 scheme with a
// 32-bit count zero-extended for the range check.
func rewriteValueARM64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64x64 lowers Rsh64x64; the count is already 64-bit, so no
// extension is needed before the CMPconst range check.
func rewriteValueARM64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh64x8 lowers Rsh64x8; 8-bit count, zero-extended for the
// range check, clamped to 63 when potentially out of range.
func rewriteValueARM64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8Ux16 lowers Rsh8Ux16; the 8-bit operand is first
// zero-extended to 64 bits so SRL operates on the full register.
func rewriteValueARM64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8Ux32 lowers Rsh8Ux32; operand zero-extended, 32-bit count
// zero-extended for the >= 64 check.
func rewriteValueARM64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8Ux64 lowers Rsh8Ux64; 64-bit count used directly in the
// range check.
func rewriteValueARM64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8Ux8 lowers Rsh8Ux8; both operand and count are
// zero-extended to 64 bits.
func rewriteValueARM64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8x16 lowers Rsh8x16 (signed); operand sign-extended to
// 64 bits, count clamped to 63 when possibly >= 64.
func rewriteValueARM64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8x32 lowers Rsh8x32 (signed); 32-bit count zero-extended
// for the range check.
func rewriteValueARM64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8x64 lowers Rsh8x64 (signed); 64-bit count used directly.
func rewriteValueARM64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpRsh8x8 lowers Rsh8x8 (signed); both operand sign-extension
// and 8-bit count zero-extension feed the clamped SRA.
func rewriteValueARM64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpSelect0 lowers Select0 of multi-result ops: the low word of
// Mul64uhilo/Mul64uover, and the sum word of Add64carry/Sub64borrow via the
// carry-flag instruction sequences.
func rewriteValueARM64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uhilo x y))
	// result: (UMULH x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64UMULH)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Add64carry x y c))
	// result: (Select0 <typ.UInt64> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AuxInt = int64ToAuxInt(-1)
		v2.AddArg(c)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Sub64borrow x y bo))
	// result: (Select0 <typ.UInt64> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AddArg(bo)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueARM64_OpSelect1 lowers Select1 of multi-result ops: the second word
// (high product, carry/borrow-out, or overflow bit) of the same four generic ops.
func rewriteValueARM64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Add64carry x y c))
	// result: (ADCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c)))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64ADCzerocarry)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3.AuxInt = int64ToAuxInt(-1)
		v3.AddArg(c)
		v2.AddArg(v3)
		v1.AddArg3(x, y, v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Sub64borrow x y bo))
	// result: (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64NEG)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v4.AddArg(bo)
		v3.AddArg(v4)
		v2.AddArg3(x, y, v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (NotEqual (CMPconst (UMULH <typ.UInt64> x y) [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64UMULH, typ.UInt64)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpSelectN replaces a runtime.memmove call (two argument-passing
// shapes: stack stores or register args) with an inline Move when the size is a
// small known constant and the call's stores are otherwise unused.
func rewriteValueARM64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} s1:(MOVDstore _ (MOVDconst [sz]) s2:(MOVDstore _ src s3:(MOVDstore {t} _ dst mem)))))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		s1 := call.Args[0]
		if s1.Op != OpARM64MOVDstore {
			break
		}
		_ = s1.Args[2]
		s1_1 := s1.Args[1]
		if s1_1.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(s1_1.AuxInt)
		s2 := s1.Args[2]
		if s2.Op != OpARM64MOVDstore {
			break
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		s3 := s2.Args[2]
		if s3.Op != OpARM64MOVDstore {
			break
		}
		mem := s3.Args[2]
		dst := s3.Args[1]
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpSlicemask lowers Slicemask to an arithmetic right shift of
// the negated input by 63; the rule always fires, so there is no return false.
func rewriteValueARM64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpStore selects the store instruction by the stored type's
// size (1/2/4/8 bytes) and whether it is a float (FMOVSstore/FMOVDstore).
func rewriteValueARM64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (FMOVSstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero expands small constant-size Zero ops into explicit
// zero-store sequences. NOTE(review): this function continues past the end of
// this view (the [32] rule below is truncated by the chunk boundary).
func rewriteValueARM64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [5] ptr mem)
	// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// result: (MOVWstore [3] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] ptr mem)
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [9] ptr mem)
	// result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [10] ptr mem)
	// result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [11] ptr mem)
	// result: (MOVDstore [3] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [13] ptr mem)
	// result: (MOVDstore [5] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [14] ptr mem)
	// result: (MOVDstore [6] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [15] ptr mem)
	// result: (MOVDstore [7] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] ptr mem)
	// result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(0)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg4(ptr, v0, v0, mem)
		return true
	}
	// match: (Zero [32] ptr mem)
	// result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
24006 ptr := v_0 24007 mem := v_1 24008 v.reset(OpARM64STP) 24009 v.AuxInt = int32ToAuxInt(16) 24010 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 24011 v0.AuxInt = int64ToAuxInt(0) 24012 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24013 v1.AuxInt = int32ToAuxInt(0) 24014 v1.AddArg4(ptr, v0, v0, mem) 24015 v.AddArg4(ptr, v0, v0, v1) 24016 return true 24017 } 24018 // match: (Zero [48] ptr mem) 24019 // result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))) 24020 for { 24021 if auxIntToInt64(v.AuxInt) != 48 { 24022 break 24023 } 24024 ptr := v_0 24025 mem := v_1 24026 v.reset(OpARM64STP) 24027 v.AuxInt = int32ToAuxInt(32) 24028 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 24029 v0.AuxInt = int64ToAuxInt(0) 24030 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24031 v1.AuxInt = int32ToAuxInt(16) 24032 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24033 v2.AuxInt = int32ToAuxInt(0) 24034 v2.AddArg4(ptr, v0, v0, mem) 24035 v1.AddArg4(ptr, v0, v0, v2) 24036 v.AddArg4(ptr, v0, v0, v1) 24037 return true 24038 } 24039 // match: (Zero [64] ptr mem) 24040 // result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))) 24041 for { 24042 if auxIntToInt64(v.AuxInt) != 64 { 24043 break 24044 } 24045 ptr := v_0 24046 mem := v_1 24047 v.reset(OpARM64STP) 24048 v.AuxInt = int32ToAuxInt(48) 24049 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 24050 v0.AuxInt = int64ToAuxInt(0) 24051 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24052 v1.AuxInt = int32ToAuxInt(32) 24053 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24054 v2.AuxInt = int32ToAuxInt(16) 24055 v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 24056 v3.AuxInt = int32ToAuxInt(0) 24057 v3.AddArg4(ptr, v0, v0, mem) 24058 v2.AddArg4(ptr, 
v0, v0, v3) 24059 v1.AddArg4(ptr, v0, v0, v2) 24060 v.AddArg4(ptr, v0, v0, v1) 24061 return true 24062 } 24063 // match: (Zero [s] ptr mem) 24064 // cond: s%16 != 0 && s%16 <= 8 && s > 16 24065 // result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem)) 24066 for { 24067 s := auxIntToInt64(v.AuxInt) 24068 ptr := v_0 24069 mem := v_1 24070 if !(s%16 != 0 && s%16 <= 8 && s > 16) { 24071 break 24072 } 24073 v.reset(OpZero) 24074 v.AuxInt = int64ToAuxInt(8) 24075 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 24076 v0.AuxInt = int64ToAuxInt(s - 8) 24077 v0.AddArg(ptr) 24078 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem) 24079 v1.AuxInt = int64ToAuxInt(s - s%16) 24080 v1.AddArg2(ptr, mem) 24081 v.AddArg2(v0, v1) 24082 return true 24083 } 24084 // match: (Zero [s] ptr mem) 24085 // cond: s%16 != 0 && s%16 > 8 && s > 16 24086 // result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem)) 24087 for { 24088 s := auxIntToInt64(v.AuxInt) 24089 ptr := v_0 24090 mem := v_1 24091 if !(s%16 != 0 && s%16 > 8 && s > 16) { 24092 break 24093 } 24094 v.reset(OpZero) 24095 v.AuxInt = int64ToAuxInt(16) 24096 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 24097 v0.AuxInt = int64ToAuxInt(s - 16) 24098 v0.AddArg(ptr) 24099 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem) 24100 v1.AuxInt = int64ToAuxInt(s - s%16) 24101 v1.AddArg2(ptr, mem) 24102 v.AddArg2(v0, v1) 24103 return true 24104 } 24105 // match: (Zero [s] ptr mem) 24106 // cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice 24107 // result: (DUFFZERO [4 * (64 - s/16)] ptr mem) 24108 for { 24109 s := auxIntToInt64(v.AuxInt) 24110 ptr := v_0 24111 mem := v_1 24112 if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) { 24113 break 24114 } 24115 v.reset(OpARM64DUFFZERO) 24116 v.AuxInt = int64ToAuxInt(4 * (64 - s/16)) 24117 v.AddArg2(ptr, mem) 24118 return true 24119 } 24120 // match: (Zero [s] ptr mem) 24121 // cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) 24122 // result: 
(LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem) 24123 for { 24124 s := auxIntToInt64(v.AuxInt) 24125 ptr := v_0 24126 mem := v_1 24127 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) { 24128 break 24129 } 24130 v.reset(OpARM64LoweredZero) 24131 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type) 24132 v0.AuxInt = int64ToAuxInt(s - 16) 24133 v0.AddArg(ptr) 24134 v.AddArg3(ptr, v0, mem) 24135 return true 24136 } 24137 return false 24138 } 24139 func rewriteBlockARM64(b *Block) bool { 24140 typ := &b.Func.Config.Types 24141 switch b.Kind { 24142 case BlockARM64EQ: 24143 // match: (EQ (CMPconst [0] z:(AND x y)) yes no) 24144 // cond: z.Uses == 1 24145 // result: (EQ (TST x y) yes no) 24146 for b.Controls[0].Op == OpARM64CMPconst { 24147 v_0 := b.Controls[0] 24148 if auxIntToInt64(v_0.AuxInt) != 0 { 24149 break 24150 } 24151 z := v_0.Args[0] 24152 if z.Op != OpARM64AND { 24153 break 24154 } 24155 _ = z.Args[1] 24156 z_0 := z.Args[0] 24157 z_1 := z.Args[1] 24158 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24159 x := z_0 24160 y := z_1 24161 if !(z.Uses == 1) { 24162 continue 24163 } 24164 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 24165 v0.AddArg2(x, y) 24166 b.resetWithControl(BlockARM64EQ, v0) 24167 return true 24168 } 24169 break 24170 } 24171 // match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no) 24172 // cond: x.Uses == 1 24173 // result: (EQ (TSTconst [c] y) yes no) 24174 for b.Controls[0].Op == OpARM64CMPconst { 24175 v_0 := b.Controls[0] 24176 if auxIntToInt64(v_0.AuxInt) != 0 { 24177 break 24178 } 24179 x := v_0.Args[0] 24180 if x.Op != OpARM64ANDconst { 24181 break 24182 } 24183 c := auxIntToInt64(x.AuxInt) 24184 y := x.Args[0] 24185 if !(x.Uses == 1) { 24186 break 24187 } 24188 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 24189 v0.AuxInt = int64ToAuxInt(c) 24190 v0.AddArg(y) 24191 b.resetWithControl(BlockARM64EQ, v0) 24192 return true 24193 } 24194 // match: (EQ (CMPWconst [0] z:(AND x y)) yes no) 24195 
// cond: z.Uses == 1 24196 // result: (EQ (TSTW x y) yes no) 24197 for b.Controls[0].Op == OpARM64CMPWconst { 24198 v_0 := b.Controls[0] 24199 if auxIntToInt32(v_0.AuxInt) != 0 { 24200 break 24201 } 24202 z := v_0.Args[0] 24203 if z.Op != OpARM64AND { 24204 break 24205 } 24206 _ = z.Args[1] 24207 z_0 := z.Args[0] 24208 z_1 := z.Args[1] 24209 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24210 x := z_0 24211 y := z_1 24212 if !(z.Uses == 1) { 24213 continue 24214 } 24215 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 24216 v0.AddArg2(x, y) 24217 b.resetWithControl(BlockARM64EQ, v0) 24218 return true 24219 } 24220 break 24221 } 24222 // match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no) 24223 // cond: x.Uses == 1 24224 // result: (EQ (TSTWconst [int32(c)] y) yes no) 24225 for b.Controls[0].Op == OpARM64CMPWconst { 24226 v_0 := b.Controls[0] 24227 if auxIntToInt32(v_0.AuxInt) != 0 { 24228 break 24229 } 24230 x := v_0.Args[0] 24231 if x.Op != OpARM64ANDconst { 24232 break 24233 } 24234 c := auxIntToInt64(x.AuxInt) 24235 y := x.Args[0] 24236 if !(x.Uses == 1) { 24237 break 24238 } 24239 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 24240 v0.AuxInt = int32ToAuxInt(int32(c)) 24241 v0.AddArg(y) 24242 b.resetWithControl(BlockARM64EQ, v0) 24243 return true 24244 } 24245 // match: (EQ (CMPconst [0] x:(ADDconst [c] y)) yes no) 24246 // cond: x.Uses == 1 24247 // result: (EQ (CMNconst [c] y) yes no) 24248 for b.Controls[0].Op == OpARM64CMPconst { 24249 v_0 := b.Controls[0] 24250 if auxIntToInt64(v_0.AuxInt) != 0 { 24251 break 24252 } 24253 x := v_0.Args[0] 24254 if x.Op != OpARM64ADDconst { 24255 break 24256 } 24257 c := auxIntToInt64(x.AuxInt) 24258 y := x.Args[0] 24259 if !(x.Uses == 1) { 24260 break 24261 } 24262 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 24263 v0.AuxInt = int64ToAuxInt(c) 24264 v0.AddArg(y) 24265 b.resetWithControl(BlockARM64EQ, v0) 24266 return true 24267 } 24268 // match: (EQ (CMPWconst [0] 
x:(ADDconst [c] y)) yes no) 24269 // cond: x.Uses == 1 24270 // result: (EQ (CMNWconst [int32(c)] y) yes no) 24271 for b.Controls[0].Op == OpARM64CMPWconst { 24272 v_0 := b.Controls[0] 24273 if auxIntToInt32(v_0.AuxInt) != 0 { 24274 break 24275 } 24276 x := v_0.Args[0] 24277 if x.Op != OpARM64ADDconst { 24278 break 24279 } 24280 c := auxIntToInt64(x.AuxInt) 24281 y := x.Args[0] 24282 if !(x.Uses == 1) { 24283 break 24284 } 24285 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 24286 v0.AuxInt = int32ToAuxInt(int32(c)) 24287 v0.AddArg(y) 24288 b.resetWithControl(BlockARM64EQ, v0) 24289 return true 24290 } 24291 // match: (EQ (CMPconst [0] z:(ADD x y)) yes no) 24292 // cond: z.Uses == 1 24293 // result: (EQ (CMN x y) yes no) 24294 for b.Controls[0].Op == OpARM64CMPconst { 24295 v_0 := b.Controls[0] 24296 if auxIntToInt64(v_0.AuxInt) != 0 { 24297 break 24298 } 24299 z := v_0.Args[0] 24300 if z.Op != OpARM64ADD { 24301 break 24302 } 24303 _ = z.Args[1] 24304 z_0 := z.Args[0] 24305 z_1 := z.Args[1] 24306 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24307 x := z_0 24308 y := z_1 24309 if !(z.Uses == 1) { 24310 continue 24311 } 24312 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 24313 v0.AddArg2(x, y) 24314 b.resetWithControl(BlockARM64EQ, v0) 24315 return true 24316 } 24317 break 24318 } 24319 // match: (EQ (CMPWconst [0] z:(ADD x y)) yes no) 24320 // cond: z.Uses == 1 24321 // result: (EQ (CMNW x y) yes no) 24322 for b.Controls[0].Op == OpARM64CMPWconst { 24323 v_0 := b.Controls[0] 24324 if auxIntToInt32(v_0.AuxInt) != 0 { 24325 break 24326 } 24327 z := v_0.Args[0] 24328 if z.Op != OpARM64ADD { 24329 break 24330 } 24331 _ = z.Args[1] 24332 z_0 := z.Args[0] 24333 z_1 := z.Args[1] 24334 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24335 x := z_0 24336 y := z_1 24337 if !(z.Uses == 1) { 24338 continue 24339 } 24340 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 24341 v0.AddArg2(x, y) 24342 
b.resetWithControl(BlockARM64EQ, v0) 24343 return true 24344 } 24345 break 24346 } 24347 // match: (EQ (CMP x z:(NEG y)) yes no) 24348 // cond: z.Uses == 1 24349 // result: (EQ (CMN x y) yes no) 24350 for b.Controls[0].Op == OpARM64CMP { 24351 v_0 := b.Controls[0] 24352 _ = v_0.Args[1] 24353 x := v_0.Args[0] 24354 z := v_0.Args[1] 24355 if z.Op != OpARM64NEG { 24356 break 24357 } 24358 y := z.Args[0] 24359 if !(z.Uses == 1) { 24360 break 24361 } 24362 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 24363 v0.AddArg2(x, y) 24364 b.resetWithControl(BlockARM64EQ, v0) 24365 return true 24366 } 24367 // match: (EQ (CMPW x z:(NEG y)) yes no) 24368 // cond: z.Uses == 1 24369 // result: (EQ (CMNW x y) yes no) 24370 for b.Controls[0].Op == OpARM64CMPW { 24371 v_0 := b.Controls[0] 24372 _ = v_0.Args[1] 24373 x := v_0.Args[0] 24374 z := v_0.Args[1] 24375 if z.Op != OpARM64NEG { 24376 break 24377 } 24378 y := z.Args[0] 24379 if !(z.Uses == 1) { 24380 break 24381 } 24382 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 24383 v0.AddArg2(x, y) 24384 b.resetWithControl(BlockARM64EQ, v0) 24385 return true 24386 } 24387 // match: (EQ (CMPconst [0] x) yes no) 24388 // result: (Z x yes no) 24389 for b.Controls[0].Op == OpARM64CMPconst { 24390 v_0 := b.Controls[0] 24391 if auxIntToInt64(v_0.AuxInt) != 0 { 24392 break 24393 } 24394 x := v_0.Args[0] 24395 b.resetWithControl(BlockARM64Z, x) 24396 return true 24397 } 24398 // match: (EQ (CMPWconst [0] x) yes no) 24399 // result: (ZW x yes no) 24400 for b.Controls[0].Op == OpARM64CMPWconst { 24401 v_0 := b.Controls[0] 24402 if auxIntToInt32(v_0.AuxInt) != 0 { 24403 break 24404 } 24405 x := v_0.Args[0] 24406 b.resetWithControl(BlockARM64ZW, x) 24407 return true 24408 } 24409 // match: (EQ (CMPconst [0] z:(MADD a x y)) yes no) 24410 // cond: z.Uses==1 24411 // result: (EQ (CMN a (MUL <x.Type> x y)) yes no) 24412 for b.Controls[0].Op == OpARM64CMPconst { 24413 v_0 := b.Controls[0] 24414 if auxIntToInt64(v_0.AuxInt) != 0 { 24415 
break 24416 } 24417 z := v_0.Args[0] 24418 if z.Op != OpARM64MADD { 24419 break 24420 } 24421 y := z.Args[2] 24422 a := z.Args[0] 24423 x := z.Args[1] 24424 if !(z.Uses == 1) { 24425 break 24426 } 24427 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 24428 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 24429 v1.AddArg2(x, y) 24430 v0.AddArg2(a, v1) 24431 b.resetWithControl(BlockARM64EQ, v0) 24432 return true 24433 } 24434 // match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no) 24435 // cond: z.Uses==1 24436 // result: (EQ (CMP a (MUL <x.Type> x y)) yes no) 24437 for b.Controls[0].Op == OpARM64CMPconst { 24438 v_0 := b.Controls[0] 24439 if auxIntToInt64(v_0.AuxInt) != 0 { 24440 break 24441 } 24442 z := v_0.Args[0] 24443 if z.Op != OpARM64MSUB { 24444 break 24445 } 24446 y := z.Args[2] 24447 a := z.Args[0] 24448 x := z.Args[1] 24449 if !(z.Uses == 1) { 24450 break 24451 } 24452 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 24453 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 24454 v1.AddArg2(x, y) 24455 v0.AddArg2(a, v1) 24456 b.resetWithControl(BlockARM64EQ, v0) 24457 return true 24458 } 24459 // match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no) 24460 // cond: z.Uses==1 24461 // result: (EQ (CMNW a (MULW <x.Type> x y)) yes no) 24462 for b.Controls[0].Op == OpARM64CMPWconst { 24463 v_0 := b.Controls[0] 24464 if auxIntToInt32(v_0.AuxInt) != 0 { 24465 break 24466 } 24467 z := v_0.Args[0] 24468 if z.Op != OpARM64MADDW { 24469 break 24470 } 24471 y := z.Args[2] 24472 a := z.Args[0] 24473 x := z.Args[1] 24474 if !(z.Uses == 1) { 24475 break 24476 } 24477 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 24478 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 24479 v1.AddArg2(x, y) 24480 v0.AddArg2(a, v1) 24481 b.resetWithControl(BlockARM64EQ, v0) 24482 return true 24483 } 24484 // match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no) 24485 // cond: z.Uses==1 24486 // result: (EQ (CMPW a (MULW <x.Type> x y)) yes no) 24487 for b.Controls[0].Op == 
OpARM64CMPWconst { 24488 v_0 := b.Controls[0] 24489 if auxIntToInt32(v_0.AuxInt) != 0 { 24490 break 24491 } 24492 z := v_0.Args[0] 24493 if z.Op != OpARM64MSUBW { 24494 break 24495 } 24496 y := z.Args[2] 24497 a := z.Args[0] 24498 x := z.Args[1] 24499 if !(z.Uses == 1) { 24500 break 24501 } 24502 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 24503 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 24504 v1.AddArg2(x, y) 24505 v0.AddArg2(a, v1) 24506 b.resetWithControl(BlockARM64EQ, v0) 24507 return true 24508 } 24509 // match: (EQ (TSTconst [c] x) yes no) 24510 // cond: oneBit(c) 24511 // result: (TBZ [int64(ntz64(c))] x yes no) 24512 for b.Controls[0].Op == OpARM64TSTconst { 24513 v_0 := b.Controls[0] 24514 c := auxIntToInt64(v_0.AuxInt) 24515 x := v_0.Args[0] 24516 if !(oneBit(c)) { 24517 break 24518 } 24519 b.resetWithControl(BlockARM64TBZ, x) 24520 b.AuxInt = int64ToAuxInt(int64(ntz64(c))) 24521 return true 24522 } 24523 // match: (EQ (TSTWconst [c] x) yes no) 24524 // cond: oneBit(int64(uint32(c))) 24525 // result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no) 24526 for b.Controls[0].Op == OpARM64TSTWconst { 24527 v_0 := b.Controls[0] 24528 c := auxIntToInt32(v_0.AuxInt) 24529 x := v_0.Args[0] 24530 if !(oneBit(int64(uint32(c)))) { 24531 break 24532 } 24533 b.resetWithControl(BlockARM64TBZ, x) 24534 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c))))) 24535 return true 24536 } 24537 // match: (EQ (FlagConstant [fc]) yes no) 24538 // cond: fc.eq() 24539 // result: (First yes no) 24540 for b.Controls[0].Op == OpARM64FlagConstant { 24541 v_0 := b.Controls[0] 24542 fc := auxIntToFlagConstant(v_0.AuxInt) 24543 if !(fc.eq()) { 24544 break 24545 } 24546 b.Reset(BlockFirst) 24547 return true 24548 } 24549 // match: (EQ (FlagConstant [fc]) yes no) 24550 // cond: !fc.eq() 24551 // result: (First no yes) 24552 for b.Controls[0].Op == OpARM64FlagConstant { 24553 v_0 := b.Controls[0] 24554 fc := auxIntToFlagConstant(v_0.AuxInt) 24555 if !(!fc.eq()) { 24556 
break 24557 } 24558 b.Reset(BlockFirst) 24559 b.swapSuccessors() 24560 return true 24561 } 24562 // match: (EQ (InvertFlags cmp) yes no) 24563 // result: (EQ cmp yes no) 24564 for b.Controls[0].Op == OpARM64InvertFlags { 24565 v_0 := b.Controls[0] 24566 cmp := v_0.Args[0] 24567 b.resetWithControl(BlockARM64EQ, cmp) 24568 return true 24569 } 24570 case BlockARM64FGE: 24571 // match: (FGE (InvertFlags cmp) yes no) 24572 // result: (FLE cmp yes no) 24573 for b.Controls[0].Op == OpARM64InvertFlags { 24574 v_0 := b.Controls[0] 24575 cmp := v_0.Args[0] 24576 b.resetWithControl(BlockARM64FLE, cmp) 24577 return true 24578 } 24579 case BlockARM64FGT: 24580 // match: (FGT (InvertFlags cmp) yes no) 24581 // result: (FLT cmp yes no) 24582 for b.Controls[0].Op == OpARM64InvertFlags { 24583 v_0 := b.Controls[0] 24584 cmp := v_0.Args[0] 24585 b.resetWithControl(BlockARM64FLT, cmp) 24586 return true 24587 } 24588 case BlockARM64FLE: 24589 // match: (FLE (InvertFlags cmp) yes no) 24590 // result: (FGE cmp yes no) 24591 for b.Controls[0].Op == OpARM64InvertFlags { 24592 v_0 := b.Controls[0] 24593 cmp := v_0.Args[0] 24594 b.resetWithControl(BlockARM64FGE, cmp) 24595 return true 24596 } 24597 case BlockARM64FLT: 24598 // match: (FLT (InvertFlags cmp) yes no) 24599 // result: (FGT cmp yes no) 24600 for b.Controls[0].Op == OpARM64InvertFlags { 24601 v_0 := b.Controls[0] 24602 cmp := v_0.Args[0] 24603 b.resetWithControl(BlockARM64FGT, cmp) 24604 return true 24605 } 24606 case BlockARM64GE: 24607 // match: (GE (CMPconst [0] z:(AND x y)) yes no) 24608 // cond: z.Uses == 1 24609 // result: (GE (TST x y) yes no) 24610 for b.Controls[0].Op == OpARM64CMPconst { 24611 v_0 := b.Controls[0] 24612 if auxIntToInt64(v_0.AuxInt) != 0 { 24613 break 24614 } 24615 z := v_0.Args[0] 24616 if z.Op != OpARM64AND { 24617 break 24618 } 24619 _ = z.Args[1] 24620 z_0 := z.Args[0] 24621 z_1 := z.Args[1] 24622 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24623 x := z_0 24624 y := z_1 24625 if 
!(z.Uses == 1) { 24626 continue 24627 } 24628 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 24629 v0.AddArg2(x, y) 24630 b.resetWithControl(BlockARM64GE, v0) 24631 return true 24632 } 24633 break 24634 } 24635 // match: (GE (CMPconst [0] x:(ANDconst [c] y)) yes no) 24636 // cond: x.Uses == 1 24637 // result: (GE (TSTconst [c] y) yes no) 24638 for b.Controls[0].Op == OpARM64CMPconst { 24639 v_0 := b.Controls[0] 24640 if auxIntToInt64(v_0.AuxInt) != 0 { 24641 break 24642 } 24643 x := v_0.Args[0] 24644 if x.Op != OpARM64ANDconst { 24645 break 24646 } 24647 c := auxIntToInt64(x.AuxInt) 24648 y := x.Args[0] 24649 if !(x.Uses == 1) { 24650 break 24651 } 24652 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 24653 v0.AuxInt = int64ToAuxInt(c) 24654 v0.AddArg(y) 24655 b.resetWithControl(BlockARM64GE, v0) 24656 return true 24657 } 24658 // match: (GE (CMPWconst [0] z:(AND x y)) yes no) 24659 // cond: z.Uses == 1 24660 // result: (GE (TSTW x y) yes no) 24661 for b.Controls[0].Op == OpARM64CMPWconst { 24662 v_0 := b.Controls[0] 24663 if auxIntToInt32(v_0.AuxInt) != 0 { 24664 break 24665 } 24666 z := v_0.Args[0] 24667 if z.Op != OpARM64AND { 24668 break 24669 } 24670 _ = z.Args[1] 24671 z_0 := z.Args[0] 24672 z_1 := z.Args[1] 24673 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24674 x := z_0 24675 y := z_1 24676 if !(z.Uses == 1) { 24677 continue 24678 } 24679 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 24680 v0.AddArg2(x, y) 24681 b.resetWithControl(BlockARM64GE, v0) 24682 return true 24683 } 24684 break 24685 } 24686 // match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 24687 // cond: x.Uses == 1 24688 // result: (GE (TSTWconst [int32(c)] y) yes no) 24689 for b.Controls[0].Op == OpARM64CMPWconst { 24690 v_0 := b.Controls[0] 24691 if auxIntToInt32(v_0.AuxInt) != 0 { 24692 break 24693 } 24694 x := v_0.Args[0] 24695 if x.Op != OpARM64ANDconst { 24696 break 24697 } 24698 c := auxIntToInt64(x.AuxInt) 24699 y := x.Args[0] 24700 if 
!(x.Uses == 1) { 24701 break 24702 } 24703 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 24704 v0.AuxInt = int32ToAuxInt(int32(c)) 24705 v0.AddArg(y) 24706 b.resetWithControl(BlockARM64GE, v0) 24707 return true 24708 } 24709 // match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no) 24710 // cond: x.Uses == 1 24711 // result: (GEnoov (CMNconst [c] y) yes no) 24712 for b.Controls[0].Op == OpARM64CMPconst { 24713 v_0 := b.Controls[0] 24714 if auxIntToInt64(v_0.AuxInt) != 0 { 24715 break 24716 } 24717 x := v_0.Args[0] 24718 if x.Op != OpARM64ADDconst { 24719 break 24720 } 24721 c := auxIntToInt64(x.AuxInt) 24722 y := x.Args[0] 24723 if !(x.Uses == 1) { 24724 break 24725 } 24726 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 24727 v0.AuxInt = int64ToAuxInt(c) 24728 v0.AddArg(y) 24729 b.resetWithControl(BlockARM64GEnoov, v0) 24730 return true 24731 } 24732 // match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 24733 // cond: x.Uses == 1 24734 // result: (GEnoov (CMNWconst [int32(c)] y) yes no) 24735 for b.Controls[0].Op == OpARM64CMPWconst { 24736 v_0 := b.Controls[0] 24737 if auxIntToInt32(v_0.AuxInt) != 0 { 24738 break 24739 } 24740 x := v_0.Args[0] 24741 if x.Op != OpARM64ADDconst { 24742 break 24743 } 24744 c := auxIntToInt64(x.AuxInt) 24745 y := x.Args[0] 24746 if !(x.Uses == 1) { 24747 break 24748 } 24749 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 24750 v0.AuxInt = int32ToAuxInt(int32(c)) 24751 v0.AddArg(y) 24752 b.resetWithControl(BlockARM64GEnoov, v0) 24753 return true 24754 } 24755 // match: (GE (CMPconst [0] z:(ADD x y)) yes no) 24756 // cond: z.Uses == 1 24757 // result: (GEnoov (CMN x y) yes no) 24758 for b.Controls[0].Op == OpARM64CMPconst { 24759 v_0 := b.Controls[0] 24760 if auxIntToInt64(v_0.AuxInt) != 0 { 24761 break 24762 } 24763 z := v_0.Args[0] 24764 if z.Op != OpARM64ADD { 24765 break 24766 } 24767 _ = z.Args[1] 24768 z_0 := z.Args[0] 24769 z_1 := z.Args[1] 24770 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = 
_i0+1, z_1, z_0 { 24771 x := z_0 24772 y := z_1 24773 if !(z.Uses == 1) { 24774 continue 24775 } 24776 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 24777 v0.AddArg2(x, y) 24778 b.resetWithControl(BlockARM64GEnoov, v0) 24779 return true 24780 } 24781 break 24782 } 24783 // match: (GE (CMPWconst [0] z:(ADD x y)) yes no) 24784 // cond: z.Uses == 1 24785 // result: (GEnoov (CMNW x y) yes no) 24786 for b.Controls[0].Op == OpARM64CMPWconst { 24787 v_0 := b.Controls[0] 24788 if auxIntToInt32(v_0.AuxInt) != 0 { 24789 break 24790 } 24791 z := v_0.Args[0] 24792 if z.Op != OpARM64ADD { 24793 break 24794 } 24795 _ = z.Args[1] 24796 z_0 := z.Args[0] 24797 z_1 := z.Args[1] 24798 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 24799 x := z_0 24800 y := z_1 24801 if !(z.Uses == 1) { 24802 continue 24803 } 24804 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 24805 v0.AddArg2(x, y) 24806 b.resetWithControl(BlockARM64GEnoov, v0) 24807 return true 24808 } 24809 break 24810 } 24811 // match: (GE (CMPconst [0] z:(MADD a x y)) yes no) 24812 // cond: z.Uses==1 24813 // result: (GEnoov (CMN a (MUL <x.Type> x y)) yes no) 24814 for b.Controls[0].Op == OpARM64CMPconst { 24815 v_0 := b.Controls[0] 24816 if auxIntToInt64(v_0.AuxInt) != 0 { 24817 break 24818 } 24819 z := v_0.Args[0] 24820 if z.Op != OpARM64MADD { 24821 break 24822 } 24823 y := z.Args[2] 24824 a := z.Args[0] 24825 x := z.Args[1] 24826 if !(z.Uses == 1) { 24827 break 24828 } 24829 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 24830 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 24831 v1.AddArg2(x, y) 24832 v0.AddArg2(a, v1) 24833 b.resetWithControl(BlockARM64GEnoov, v0) 24834 return true 24835 } 24836 // match: (GE (CMPconst [0] z:(MSUB a x y)) yes no) 24837 // cond: z.Uses==1 24838 // result: (GEnoov (CMP a (MUL <x.Type> x y)) yes no) 24839 for b.Controls[0].Op == OpARM64CMPconst { 24840 v_0 := b.Controls[0] 24841 if auxIntToInt64(v_0.AuxInt) != 0 { 24842 break 24843 } 24844 z := 
v_0.Args[0] 24845 if z.Op != OpARM64MSUB { 24846 break 24847 } 24848 y := z.Args[2] 24849 a := z.Args[0] 24850 x := z.Args[1] 24851 if !(z.Uses == 1) { 24852 break 24853 } 24854 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 24855 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 24856 v1.AddArg2(x, y) 24857 v0.AddArg2(a, v1) 24858 b.resetWithControl(BlockARM64GEnoov, v0) 24859 return true 24860 } 24861 // match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no) 24862 // cond: z.Uses==1 24863 // result: (GEnoov (CMNW a (MULW <x.Type> x y)) yes no) 24864 for b.Controls[0].Op == OpARM64CMPWconst { 24865 v_0 := b.Controls[0] 24866 if auxIntToInt32(v_0.AuxInt) != 0 { 24867 break 24868 } 24869 z := v_0.Args[0] 24870 if z.Op != OpARM64MADDW { 24871 break 24872 } 24873 y := z.Args[2] 24874 a := z.Args[0] 24875 x := z.Args[1] 24876 if !(z.Uses == 1) { 24877 break 24878 } 24879 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 24880 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 24881 v1.AddArg2(x, y) 24882 v0.AddArg2(a, v1) 24883 b.resetWithControl(BlockARM64GEnoov, v0) 24884 return true 24885 } 24886 // match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no) 24887 // cond: z.Uses==1 24888 // result: (GEnoov (CMPW a (MULW <x.Type> x y)) yes no) 24889 for b.Controls[0].Op == OpARM64CMPWconst { 24890 v_0 := b.Controls[0] 24891 if auxIntToInt32(v_0.AuxInt) != 0 { 24892 break 24893 } 24894 z := v_0.Args[0] 24895 if z.Op != OpARM64MSUBW { 24896 break 24897 } 24898 y := z.Args[2] 24899 a := z.Args[0] 24900 x := z.Args[1] 24901 if !(z.Uses == 1) { 24902 break 24903 } 24904 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 24905 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 24906 v1.AddArg2(x, y) 24907 v0.AddArg2(a, v1) 24908 b.resetWithControl(BlockARM64GEnoov, v0) 24909 return true 24910 } 24911 // match: (GE (CMPWconst [0] x) yes no) 24912 // result: (TBZ [31] x yes no) 24913 for b.Controls[0].Op == OpARM64CMPWconst { 24914 v_0 := b.Controls[0] 24915 if 
auxIntToInt32(v_0.AuxInt) != 0 { 24916 break 24917 } 24918 x := v_0.Args[0] 24919 b.resetWithControl(BlockARM64TBZ, x) 24920 b.AuxInt = int64ToAuxInt(31) 24921 return true 24922 } 24923 // match: (GE (CMPconst [0] x) yes no) 24924 // result: (TBZ [63] x yes no) 24925 for b.Controls[0].Op == OpARM64CMPconst { 24926 v_0 := b.Controls[0] 24927 if auxIntToInt64(v_0.AuxInt) != 0 { 24928 break 24929 } 24930 x := v_0.Args[0] 24931 b.resetWithControl(BlockARM64TBZ, x) 24932 b.AuxInt = int64ToAuxInt(63) 24933 return true 24934 } 24935 // match: (GE (FlagConstant [fc]) yes no) 24936 // cond: fc.ge() 24937 // result: (First yes no) 24938 for b.Controls[0].Op == OpARM64FlagConstant { 24939 v_0 := b.Controls[0] 24940 fc := auxIntToFlagConstant(v_0.AuxInt) 24941 if !(fc.ge()) { 24942 break 24943 } 24944 b.Reset(BlockFirst) 24945 return true 24946 } 24947 // match: (GE (FlagConstant [fc]) yes no) 24948 // cond: !fc.ge() 24949 // result: (First no yes) 24950 for b.Controls[0].Op == OpARM64FlagConstant { 24951 v_0 := b.Controls[0] 24952 fc := auxIntToFlagConstant(v_0.AuxInt) 24953 if !(!fc.ge()) { 24954 break 24955 } 24956 b.Reset(BlockFirst) 24957 b.swapSuccessors() 24958 return true 24959 } 24960 // match: (GE (InvertFlags cmp) yes no) 24961 // result: (LE cmp yes no) 24962 for b.Controls[0].Op == OpARM64InvertFlags { 24963 v_0 := b.Controls[0] 24964 cmp := v_0.Args[0] 24965 b.resetWithControl(BlockARM64LE, cmp) 24966 return true 24967 } 24968 case BlockARM64GEnoov: 24969 // match: (GEnoov (FlagConstant [fc]) yes no) 24970 // cond: fc.geNoov() 24971 // result: (First yes no) 24972 for b.Controls[0].Op == OpARM64FlagConstant { 24973 v_0 := b.Controls[0] 24974 fc := auxIntToFlagConstant(v_0.AuxInt) 24975 if !(fc.geNoov()) { 24976 break 24977 } 24978 b.Reset(BlockFirst) 24979 return true 24980 } 24981 // match: (GEnoov (FlagConstant [fc]) yes no) 24982 // cond: !fc.geNoov() 24983 // result: (First no yes) 24984 for b.Controls[0].Op == OpARM64FlagConstant { 24985 v_0 := b.Controls[0] 
24986 fc := auxIntToFlagConstant(v_0.AuxInt) 24987 if !(!fc.geNoov()) { 24988 break 24989 } 24990 b.Reset(BlockFirst) 24991 b.swapSuccessors() 24992 return true 24993 } 24994 // match: (GEnoov (InvertFlags cmp) yes no) 24995 // result: (LEnoov cmp yes no) 24996 for b.Controls[0].Op == OpARM64InvertFlags { 24997 v_0 := b.Controls[0] 24998 cmp := v_0.Args[0] 24999 b.resetWithControl(BlockARM64LEnoov, cmp) 25000 return true 25001 } 25002 case BlockARM64GT: 25003 // match: (GT (CMPconst [0] z:(AND x y)) yes no) 25004 // cond: z.Uses == 1 25005 // result: (GT (TST x y) yes no) 25006 for b.Controls[0].Op == OpARM64CMPconst { 25007 v_0 := b.Controls[0] 25008 if auxIntToInt64(v_0.AuxInt) != 0 { 25009 break 25010 } 25011 z := v_0.Args[0] 25012 if z.Op != OpARM64AND { 25013 break 25014 } 25015 _ = z.Args[1] 25016 z_0 := z.Args[0] 25017 z_1 := z.Args[1] 25018 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25019 x := z_0 25020 y := z_1 25021 if !(z.Uses == 1) { 25022 continue 25023 } 25024 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 25025 v0.AddArg2(x, y) 25026 b.resetWithControl(BlockARM64GT, v0) 25027 return true 25028 } 25029 break 25030 } 25031 // match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no) 25032 // cond: x.Uses == 1 25033 // result: (GT (TSTconst [c] y) yes no) 25034 for b.Controls[0].Op == OpARM64CMPconst { 25035 v_0 := b.Controls[0] 25036 if auxIntToInt64(v_0.AuxInt) != 0 { 25037 break 25038 } 25039 x := v_0.Args[0] 25040 if x.Op != OpARM64ANDconst { 25041 break 25042 } 25043 c := auxIntToInt64(x.AuxInt) 25044 y := x.Args[0] 25045 if !(x.Uses == 1) { 25046 break 25047 } 25048 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 25049 v0.AuxInt = int64ToAuxInt(c) 25050 v0.AddArg(y) 25051 b.resetWithControl(BlockARM64GT, v0) 25052 return true 25053 } 25054 // match: (GT (CMPWconst [0] z:(AND x y)) yes no) 25055 // cond: z.Uses == 1 25056 // result: (GT (TSTW x y) yes no) 25057 for b.Controls[0].Op == OpARM64CMPWconst { 25058 v_0 := 
b.Controls[0] 25059 if auxIntToInt32(v_0.AuxInt) != 0 { 25060 break 25061 } 25062 z := v_0.Args[0] 25063 if z.Op != OpARM64AND { 25064 break 25065 } 25066 _ = z.Args[1] 25067 z_0 := z.Args[0] 25068 z_1 := z.Args[1] 25069 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25070 x := z_0 25071 y := z_1 25072 if !(z.Uses == 1) { 25073 continue 25074 } 25075 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 25076 v0.AddArg2(x, y) 25077 b.resetWithControl(BlockARM64GT, v0) 25078 return true 25079 } 25080 break 25081 } 25082 // match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 25083 // cond: x.Uses == 1 25084 // result: (GT (TSTWconst [int32(c)] y) yes no) 25085 for b.Controls[0].Op == OpARM64CMPWconst { 25086 v_0 := b.Controls[0] 25087 if auxIntToInt32(v_0.AuxInt) != 0 { 25088 break 25089 } 25090 x := v_0.Args[0] 25091 if x.Op != OpARM64ANDconst { 25092 break 25093 } 25094 c := auxIntToInt64(x.AuxInt) 25095 y := x.Args[0] 25096 if !(x.Uses == 1) { 25097 break 25098 } 25099 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 25100 v0.AuxInt = int32ToAuxInt(int32(c)) 25101 v0.AddArg(y) 25102 b.resetWithControl(BlockARM64GT, v0) 25103 return true 25104 } 25105 // match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no) 25106 // cond: x.Uses == 1 25107 // result: (GTnoov (CMNconst [c] y) yes no) 25108 for b.Controls[0].Op == OpARM64CMPconst { 25109 v_0 := b.Controls[0] 25110 if auxIntToInt64(v_0.AuxInt) != 0 { 25111 break 25112 } 25113 x := v_0.Args[0] 25114 if x.Op != OpARM64ADDconst { 25115 break 25116 } 25117 c := auxIntToInt64(x.AuxInt) 25118 y := x.Args[0] 25119 if !(x.Uses == 1) { 25120 break 25121 } 25122 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 25123 v0.AuxInt = int64ToAuxInt(c) 25124 v0.AddArg(y) 25125 b.resetWithControl(BlockARM64GTnoov, v0) 25126 return true 25127 } 25128 // match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 25129 // cond: x.Uses == 1 25130 // result: (GTnoov (CMNWconst [int32(c)] y) yes no) 25131 
for b.Controls[0].Op == OpARM64CMPWconst { 25132 v_0 := b.Controls[0] 25133 if auxIntToInt32(v_0.AuxInt) != 0 { 25134 break 25135 } 25136 x := v_0.Args[0] 25137 if x.Op != OpARM64ADDconst { 25138 break 25139 } 25140 c := auxIntToInt64(x.AuxInt) 25141 y := x.Args[0] 25142 if !(x.Uses == 1) { 25143 break 25144 } 25145 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 25146 v0.AuxInt = int32ToAuxInt(int32(c)) 25147 v0.AddArg(y) 25148 b.resetWithControl(BlockARM64GTnoov, v0) 25149 return true 25150 } 25151 // match: (GT (CMPconst [0] z:(ADD x y)) yes no) 25152 // cond: z.Uses == 1 25153 // result: (GTnoov (CMN x y) yes no) 25154 for b.Controls[0].Op == OpARM64CMPconst { 25155 v_0 := b.Controls[0] 25156 if auxIntToInt64(v_0.AuxInt) != 0 { 25157 break 25158 } 25159 z := v_0.Args[0] 25160 if z.Op != OpARM64ADD { 25161 break 25162 } 25163 _ = z.Args[1] 25164 z_0 := z.Args[0] 25165 z_1 := z.Args[1] 25166 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25167 x := z_0 25168 y := z_1 25169 if !(z.Uses == 1) { 25170 continue 25171 } 25172 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 25173 v0.AddArg2(x, y) 25174 b.resetWithControl(BlockARM64GTnoov, v0) 25175 return true 25176 } 25177 break 25178 } 25179 // match: (GT (CMPWconst [0] z:(ADD x y)) yes no) 25180 // cond: z.Uses == 1 25181 // result: (GTnoov (CMNW x y) yes no) 25182 for b.Controls[0].Op == OpARM64CMPWconst { 25183 v_0 := b.Controls[0] 25184 if auxIntToInt32(v_0.AuxInt) != 0 { 25185 break 25186 } 25187 z := v_0.Args[0] 25188 if z.Op != OpARM64ADD { 25189 break 25190 } 25191 _ = z.Args[1] 25192 z_0 := z.Args[0] 25193 z_1 := z.Args[1] 25194 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25195 x := z_0 25196 y := z_1 25197 if !(z.Uses == 1) { 25198 continue 25199 } 25200 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 25201 v0.AddArg2(x, y) 25202 b.resetWithControl(BlockARM64GTnoov, v0) 25203 return true 25204 } 25205 break 25206 } 25207 // match: (GT (CMPconst [0] 
z:(MADD a x y)) yes no) 25208 // cond: z.Uses==1 25209 // result: (GTnoov (CMN a (MUL <x.Type> x y)) yes no) 25210 for b.Controls[0].Op == OpARM64CMPconst { 25211 v_0 := b.Controls[0] 25212 if auxIntToInt64(v_0.AuxInt) != 0 { 25213 break 25214 } 25215 z := v_0.Args[0] 25216 if z.Op != OpARM64MADD { 25217 break 25218 } 25219 y := z.Args[2] 25220 a := z.Args[0] 25221 x := z.Args[1] 25222 if !(z.Uses == 1) { 25223 break 25224 } 25225 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 25226 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 25227 v1.AddArg2(x, y) 25228 v0.AddArg2(a, v1) 25229 b.resetWithControl(BlockARM64GTnoov, v0) 25230 return true 25231 } 25232 // match: (GT (CMPconst [0] z:(MSUB a x y)) yes no) 25233 // cond: z.Uses==1 25234 // result: (GTnoov (CMP a (MUL <x.Type> x y)) yes no) 25235 for b.Controls[0].Op == OpARM64CMPconst { 25236 v_0 := b.Controls[0] 25237 if auxIntToInt64(v_0.AuxInt) != 0 { 25238 break 25239 } 25240 z := v_0.Args[0] 25241 if z.Op != OpARM64MSUB { 25242 break 25243 } 25244 y := z.Args[2] 25245 a := z.Args[0] 25246 x := z.Args[1] 25247 if !(z.Uses == 1) { 25248 break 25249 } 25250 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 25251 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 25252 v1.AddArg2(x, y) 25253 v0.AddArg2(a, v1) 25254 b.resetWithControl(BlockARM64GTnoov, v0) 25255 return true 25256 } 25257 // match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no) 25258 // cond: z.Uses==1 25259 // result: (GTnoov (CMNW a (MULW <x.Type> x y)) yes no) 25260 for b.Controls[0].Op == OpARM64CMPWconst { 25261 v_0 := b.Controls[0] 25262 if auxIntToInt32(v_0.AuxInt) != 0 { 25263 break 25264 } 25265 z := v_0.Args[0] 25266 if z.Op != OpARM64MADDW { 25267 break 25268 } 25269 y := z.Args[2] 25270 a := z.Args[0] 25271 x := z.Args[1] 25272 if !(z.Uses == 1) { 25273 break 25274 } 25275 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 25276 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 25277 v1.AddArg2(x, y) 25278 v0.AddArg2(a, v1) 
25279 b.resetWithControl(BlockARM64GTnoov, v0) 25280 return true 25281 } 25282 // match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no) 25283 // cond: z.Uses==1 25284 // result: (GTnoov (CMPW a (MULW <x.Type> x y)) yes no) 25285 for b.Controls[0].Op == OpARM64CMPWconst { 25286 v_0 := b.Controls[0] 25287 if auxIntToInt32(v_0.AuxInt) != 0 { 25288 break 25289 } 25290 z := v_0.Args[0] 25291 if z.Op != OpARM64MSUBW { 25292 break 25293 } 25294 y := z.Args[2] 25295 a := z.Args[0] 25296 x := z.Args[1] 25297 if !(z.Uses == 1) { 25298 break 25299 } 25300 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 25301 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 25302 v1.AddArg2(x, y) 25303 v0.AddArg2(a, v1) 25304 b.resetWithControl(BlockARM64GTnoov, v0) 25305 return true 25306 } 25307 // match: (GT (FlagConstant [fc]) yes no) 25308 // cond: fc.gt() 25309 // result: (First yes no) 25310 for b.Controls[0].Op == OpARM64FlagConstant { 25311 v_0 := b.Controls[0] 25312 fc := auxIntToFlagConstant(v_0.AuxInt) 25313 if !(fc.gt()) { 25314 break 25315 } 25316 b.Reset(BlockFirst) 25317 return true 25318 } 25319 // match: (GT (FlagConstant [fc]) yes no) 25320 // cond: !fc.gt() 25321 // result: (First no yes) 25322 for b.Controls[0].Op == OpARM64FlagConstant { 25323 v_0 := b.Controls[0] 25324 fc := auxIntToFlagConstant(v_0.AuxInt) 25325 if !(!fc.gt()) { 25326 break 25327 } 25328 b.Reset(BlockFirst) 25329 b.swapSuccessors() 25330 return true 25331 } 25332 // match: (GT (InvertFlags cmp) yes no) 25333 // result: (LT cmp yes no) 25334 for b.Controls[0].Op == OpARM64InvertFlags { 25335 v_0 := b.Controls[0] 25336 cmp := v_0.Args[0] 25337 b.resetWithControl(BlockARM64LT, cmp) 25338 return true 25339 } 25340 case BlockARM64GTnoov: 25341 // match: (GTnoov (FlagConstant [fc]) yes no) 25342 // cond: fc.gtNoov() 25343 // result: (First yes no) 25344 for b.Controls[0].Op == OpARM64FlagConstant { 25345 v_0 := b.Controls[0] 25346 fc := auxIntToFlagConstant(v_0.AuxInt) 25347 if !(fc.gtNoov()) { 25348 
break 25349 } 25350 b.Reset(BlockFirst) 25351 return true 25352 } 25353 // match: (GTnoov (FlagConstant [fc]) yes no) 25354 // cond: !fc.gtNoov() 25355 // result: (First no yes) 25356 for b.Controls[0].Op == OpARM64FlagConstant { 25357 v_0 := b.Controls[0] 25358 fc := auxIntToFlagConstant(v_0.AuxInt) 25359 if !(!fc.gtNoov()) { 25360 break 25361 } 25362 b.Reset(BlockFirst) 25363 b.swapSuccessors() 25364 return true 25365 } 25366 // match: (GTnoov (InvertFlags cmp) yes no) 25367 // result: (LTnoov cmp yes no) 25368 for b.Controls[0].Op == OpARM64InvertFlags { 25369 v_0 := b.Controls[0] 25370 cmp := v_0.Args[0] 25371 b.resetWithControl(BlockARM64LTnoov, cmp) 25372 return true 25373 } 25374 case BlockIf: 25375 // match: (If (Equal cc) yes no) 25376 // result: (EQ cc yes no) 25377 for b.Controls[0].Op == OpARM64Equal { 25378 v_0 := b.Controls[0] 25379 cc := v_0.Args[0] 25380 b.resetWithControl(BlockARM64EQ, cc) 25381 return true 25382 } 25383 // match: (If (NotEqual cc) yes no) 25384 // result: (NE cc yes no) 25385 for b.Controls[0].Op == OpARM64NotEqual { 25386 v_0 := b.Controls[0] 25387 cc := v_0.Args[0] 25388 b.resetWithControl(BlockARM64NE, cc) 25389 return true 25390 } 25391 // match: (If (LessThan cc) yes no) 25392 // result: (LT cc yes no) 25393 for b.Controls[0].Op == OpARM64LessThan { 25394 v_0 := b.Controls[0] 25395 cc := v_0.Args[0] 25396 b.resetWithControl(BlockARM64LT, cc) 25397 return true 25398 } 25399 // match: (If (LessThanU cc) yes no) 25400 // result: (ULT cc yes no) 25401 for b.Controls[0].Op == OpARM64LessThanU { 25402 v_0 := b.Controls[0] 25403 cc := v_0.Args[0] 25404 b.resetWithControl(BlockARM64ULT, cc) 25405 return true 25406 } 25407 // match: (If (LessEqual cc) yes no) 25408 // result: (LE cc yes no) 25409 for b.Controls[0].Op == OpARM64LessEqual { 25410 v_0 := b.Controls[0] 25411 cc := v_0.Args[0] 25412 b.resetWithControl(BlockARM64LE, cc) 25413 return true 25414 } 25415 // match: (If (LessEqualU cc) yes no) 25416 // result: (ULE cc yes no) 
25417 for b.Controls[0].Op == OpARM64LessEqualU { 25418 v_0 := b.Controls[0] 25419 cc := v_0.Args[0] 25420 b.resetWithControl(BlockARM64ULE, cc) 25421 return true 25422 } 25423 // match: (If (GreaterThan cc) yes no) 25424 // result: (GT cc yes no) 25425 for b.Controls[0].Op == OpARM64GreaterThan { 25426 v_0 := b.Controls[0] 25427 cc := v_0.Args[0] 25428 b.resetWithControl(BlockARM64GT, cc) 25429 return true 25430 } 25431 // match: (If (GreaterThanU cc) yes no) 25432 // result: (UGT cc yes no) 25433 for b.Controls[0].Op == OpARM64GreaterThanU { 25434 v_0 := b.Controls[0] 25435 cc := v_0.Args[0] 25436 b.resetWithControl(BlockARM64UGT, cc) 25437 return true 25438 } 25439 // match: (If (GreaterEqual cc) yes no) 25440 // result: (GE cc yes no) 25441 for b.Controls[0].Op == OpARM64GreaterEqual { 25442 v_0 := b.Controls[0] 25443 cc := v_0.Args[0] 25444 b.resetWithControl(BlockARM64GE, cc) 25445 return true 25446 } 25447 // match: (If (GreaterEqualU cc) yes no) 25448 // result: (UGE cc yes no) 25449 for b.Controls[0].Op == OpARM64GreaterEqualU { 25450 v_0 := b.Controls[0] 25451 cc := v_0.Args[0] 25452 b.resetWithControl(BlockARM64UGE, cc) 25453 return true 25454 } 25455 // match: (If (LessThanF cc) yes no) 25456 // result: (FLT cc yes no) 25457 for b.Controls[0].Op == OpARM64LessThanF { 25458 v_0 := b.Controls[0] 25459 cc := v_0.Args[0] 25460 b.resetWithControl(BlockARM64FLT, cc) 25461 return true 25462 } 25463 // match: (If (LessEqualF cc) yes no) 25464 // result: (FLE cc yes no) 25465 for b.Controls[0].Op == OpARM64LessEqualF { 25466 v_0 := b.Controls[0] 25467 cc := v_0.Args[0] 25468 b.resetWithControl(BlockARM64FLE, cc) 25469 return true 25470 } 25471 // match: (If (GreaterThanF cc) yes no) 25472 // result: (FGT cc yes no) 25473 for b.Controls[0].Op == OpARM64GreaterThanF { 25474 v_0 := b.Controls[0] 25475 cc := v_0.Args[0] 25476 b.resetWithControl(BlockARM64FGT, cc) 25477 return true 25478 } 25479 // match: (If (GreaterEqualF cc) yes no) 25480 // result: (FGE cc yes 
no) 25481 for b.Controls[0].Op == OpARM64GreaterEqualF { 25482 v_0 := b.Controls[0] 25483 cc := v_0.Args[0] 25484 b.resetWithControl(BlockARM64FGE, cc) 25485 return true 25486 } 25487 // match: (If cond yes no) 25488 // result: (TBNZ [0] cond yes no) 25489 for { 25490 cond := b.Controls[0] 25491 b.resetWithControl(BlockARM64TBNZ, cond) 25492 b.AuxInt = int64ToAuxInt(0) 25493 return true 25494 } 25495 case BlockJumpTable: 25496 // match: (JumpTable idx) 25497 // result: (JUMPTABLE {makeJumpTableSym(b)} idx (MOVDaddr <typ.Uintptr> {makeJumpTableSym(b)} (SB))) 25498 for { 25499 idx := b.Controls[0] 25500 v0 := b.NewValue0(b.Pos, OpARM64MOVDaddr, typ.Uintptr) 25501 v0.Aux = symToAux(makeJumpTableSym(b)) 25502 v1 := b.NewValue0(b.Pos, OpSB, typ.Uintptr) 25503 v0.AddArg(v1) 25504 b.resetWithControl2(BlockARM64JUMPTABLE, idx, v0) 25505 b.Aux = symToAux(makeJumpTableSym(b)) 25506 return true 25507 } 25508 case BlockARM64LE: 25509 // match: (LE (CMPconst [0] z:(AND x y)) yes no) 25510 // cond: z.Uses == 1 25511 // result: (LE (TST x y) yes no) 25512 for b.Controls[0].Op == OpARM64CMPconst { 25513 v_0 := b.Controls[0] 25514 if auxIntToInt64(v_0.AuxInt) != 0 { 25515 break 25516 } 25517 z := v_0.Args[0] 25518 if z.Op != OpARM64AND { 25519 break 25520 } 25521 _ = z.Args[1] 25522 z_0 := z.Args[0] 25523 z_1 := z.Args[1] 25524 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25525 x := z_0 25526 y := z_1 25527 if !(z.Uses == 1) { 25528 continue 25529 } 25530 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 25531 v0.AddArg2(x, y) 25532 b.resetWithControl(BlockARM64LE, v0) 25533 return true 25534 } 25535 break 25536 } 25537 // match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no) 25538 // cond: x.Uses == 1 25539 // result: (LE (TSTconst [c] y) yes no) 25540 for b.Controls[0].Op == OpARM64CMPconst { 25541 v_0 := b.Controls[0] 25542 if auxIntToInt64(v_0.AuxInt) != 0 { 25543 break 25544 } 25545 x := v_0.Args[0] 25546 if x.Op != OpARM64ANDconst { 25547 break 25548 } 25549 
c := auxIntToInt64(x.AuxInt) 25550 y := x.Args[0] 25551 if !(x.Uses == 1) { 25552 break 25553 } 25554 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 25555 v0.AuxInt = int64ToAuxInt(c) 25556 v0.AddArg(y) 25557 b.resetWithControl(BlockARM64LE, v0) 25558 return true 25559 } 25560 // match: (LE (CMPWconst [0] z:(AND x y)) yes no) 25561 // cond: z.Uses == 1 25562 // result: (LE (TSTW x y) yes no) 25563 for b.Controls[0].Op == OpARM64CMPWconst { 25564 v_0 := b.Controls[0] 25565 if auxIntToInt32(v_0.AuxInt) != 0 { 25566 break 25567 } 25568 z := v_0.Args[0] 25569 if z.Op != OpARM64AND { 25570 break 25571 } 25572 _ = z.Args[1] 25573 z_0 := z.Args[0] 25574 z_1 := z.Args[1] 25575 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25576 x := z_0 25577 y := z_1 25578 if !(z.Uses == 1) { 25579 continue 25580 } 25581 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 25582 v0.AddArg2(x, y) 25583 b.resetWithControl(BlockARM64LE, v0) 25584 return true 25585 } 25586 break 25587 } 25588 // match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 25589 // cond: x.Uses == 1 25590 // result: (LE (TSTWconst [int32(c)] y) yes no) 25591 for b.Controls[0].Op == OpARM64CMPWconst { 25592 v_0 := b.Controls[0] 25593 if auxIntToInt32(v_0.AuxInt) != 0 { 25594 break 25595 } 25596 x := v_0.Args[0] 25597 if x.Op != OpARM64ANDconst { 25598 break 25599 } 25600 c := auxIntToInt64(x.AuxInt) 25601 y := x.Args[0] 25602 if !(x.Uses == 1) { 25603 break 25604 } 25605 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 25606 v0.AuxInt = int32ToAuxInt(int32(c)) 25607 v0.AddArg(y) 25608 b.resetWithControl(BlockARM64LE, v0) 25609 return true 25610 } 25611 // match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no) 25612 // cond: x.Uses == 1 25613 // result: (LEnoov (CMNconst [c] y) yes no) 25614 for b.Controls[0].Op == OpARM64CMPconst { 25615 v_0 := b.Controls[0] 25616 if auxIntToInt64(v_0.AuxInt) != 0 { 25617 break 25618 } 25619 x := v_0.Args[0] 25620 if x.Op != OpARM64ADDconst { 
25621 break 25622 } 25623 c := auxIntToInt64(x.AuxInt) 25624 y := x.Args[0] 25625 if !(x.Uses == 1) { 25626 break 25627 } 25628 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 25629 v0.AuxInt = int64ToAuxInt(c) 25630 v0.AddArg(y) 25631 b.resetWithControl(BlockARM64LEnoov, v0) 25632 return true 25633 } 25634 // match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 25635 // cond: x.Uses == 1 25636 // result: (LEnoov (CMNWconst [int32(c)] y) yes no) 25637 for b.Controls[0].Op == OpARM64CMPWconst { 25638 v_0 := b.Controls[0] 25639 if auxIntToInt32(v_0.AuxInt) != 0 { 25640 break 25641 } 25642 x := v_0.Args[0] 25643 if x.Op != OpARM64ADDconst { 25644 break 25645 } 25646 c := auxIntToInt64(x.AuxInt) 25647 y := x.Args[0] 25648 if !(x.Uses == 1) { 25649 break 25650 } 25651 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 25652 v0.AuxInt = int32ToAuxInt(int32(c)) 25653 v0.AddArg(y) 25654 b.resetWithControl(BlockARM64LEnoov, v0) 25655 return true 25656 } 25657 // match: (LE (CMPconst [0] z:(ADD x y)) yes no) 25658 // cond: z.Uses == 1 25659 // result: (LEnoov (CMN x y) yes no) 25660 for b.Controls[0].Op == OpARM64CMPconst { 25661 v_0 := b.Controls[0] 25662 if auxIntToInt64(v_0.AuxInt) != 0 { 25663 break 25664 } 25665 z := v_0.Args[0] 25666 if z.Op != OpARM64ADD { 25667 break 25668 } 25669 _ = z.Args[1] 25670 z_0 := z.Args[0] 25671 z_1 := z.Args[1] 25672 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25673 x := z_0 25674 y := z_1 25675 if !(z.Uses == 1) { 25676 continue 25677 } 25678 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 25679 v0.AddArg2(x, y) 25680 b.resetWithControl(BlockARM64LEnoov, v0) 25681 return true 25682 } 25683 break 25684 } 25685 // match: (LE (CMPWconst [0] z:(ADD x y)) yes no) 25686 // cond: z.Uses == 1 25687 // result: (LEnoov (CMNW x y) yes no) 25688 for b.Controls[0].Op == OpARM64CMPWconst { 25689 v_0 := b.Controls[0] 25690 if auxIntToInt32(v_0.AuxInt) != 0 { 25691 break 25692 } 25693 z := v_0.Args[0] 25694 
if z.Op != OpARM64ADD { 25695 break 25696 } 25697 _ = z.Args[1] 25698 z_0 := z.Args[0] 25699 z_1 := z.Args[1] 25700 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25701 x := z_0 25702 y := z_1 25703 if !(z.Uses == 1) { 25704 continue 25705 } 25706 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 25707 v0.AddArg2(x, y) 25708 b.resetWithControl(BlockARM64LEnoov, v0) 25709 return true 25710 } 25711 break 25712 } 25713 // match: (LE (CMPconst [0] z:(MADD a x y)) yes no) 25714 // cond: z.Uses==1 25715 // result: (LEnoov (CMN a (MUL <x.Type> x y)) yes no) 25716 for b.Controls[0].Op == OpARM64CMPconst { 25717 v_0 := b.Controls[0] 25718 if auxIntToInt64(v_0.AuxInt) != 0 { 25719 break 25720 } 25721 z := v_0.Args[0] 25722 if z.Op != OpARM64MADD { 25723 break 25724 } 25725 y := z.Args[2] 25726 a := z.Args[0] 25727 x := z.Args[1] 25728 if !(z.Uses == 1) { 25729 break 25730 } 25731 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 25732 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 25733 v1.AddArg2(x, y) 25734 v0.AddArg2(a, v1) 25735 b.resetWithControl(BlockARM64LEnoov, v0) 25736 return true 25737 } 25738 // match: (LE (CMPconst [0] z:(MSUB a x y)) yes no) 25739 // cond: z.Uses==1 25740 // result: (LEnoov (CMP a (MUL <x.Type> x y)) yes no) 25741 for b.Controls[0].Op == OpARM64CMPconst { 25742 v_0 := b.Controls[0] 25743 if auxIntToInt64(v_0.AuxInt) != 0 { 25744 break 25745 } 25746 z := v_0.Args[0] 25747 if z.Op != OpARM64MSUB { 25748 break 25749 } 25750 y := z.Args[2] 25751 a := z.Args[0] 25752 x := z.Args[1] 25753 if !(z.Uses == 1) { 25754 break 25755 } 25756 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 25757 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 25758 v1.AddArg2(x, y) 25759 v0.AddArg2(a, v1) 25760 b.resetWithControl(BlockARM64LEnoov, v0) 25761 return true 25762 } 25763 // match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no) 25764 // cond: z.Uses==1 25765 // result: (LEnoov (CMNW a (MULW <x.Type> x y)) yes no) 25766 for 
b.Controls[0].Op == OpARM64CMPWconst { 25767 v_0 := b.Controls[0] 25768 if auxIntToInt32(v_0.AuxInt) != 0 { 25769 break 25770 } 25771 z := v_0.Args[0] 25772 if z.Op != OpARM64MADDW { 25773 break 25774 } 25775 y := z.Args[2] 25776 a := z.Args[0] 25777 x := z.Args[1] 25778 if !(z.Uses == 1) { 25779 break 25780 } 25781 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 25782 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 25783 v1.AddArg2(x, y) 25784 v0.AddArg2(a, v1) 25785 b.resetWithControl(BlockARM64LEnoov, v0) 25786 return true 25787 } 25788 // match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no) 25789 // cond: z.Uses==1 25790 // result: (LEnoov (CMPW a (MULW <x.Type> x y)) yes no) 25791 for b.Controls[0].Op == OpARM64CMPWconst { 25792 v_0 := b.Controls[0] 25793 if auxIntToInt32(v_0.AuxInt) != 0 { 25794 break 25795 } 25796 z := v_0.Args[0] 25797 if z.Op != OpARM64MSUBW { 25798 break 25799 } 25800 y := z.Args[2] 25801 a := z.Args[0] 25802 x := z.Args[1] 25803 if !(z.Uses == 1) { 25804 break 25805 } 25806 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 25807 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 25808 v1.AddArg2(x, y) 25809 v0.AddArg2(a, v1) 25810 b.resetWithControl(BlockARM64LEnoov, v0) 25811 return true 25812 } 25813 // match: (LE (FlagConstant [fc]) yes no) 25814 // cond: fc.le() 25815 // result: (First yes no) 25816 for b.Controls[0].Op == OpARM64FlagConstant { 25817 v_0 := b.Controls[0] 25818 fc := auxIntToFlagConstant(v_0.AuxInt) 25819 if !(fc.le()) { 25820 break 25821 } 25822 b.Reset(BlockFirst) 25823 return true 25824 } 25825 // match: (LE (FlagConstant [fc]) yes no) 25826 // cond: !fc.le() 25827 // result: (First no yes) 25828 for b.Controls[0].Op == OpARM64FlagConstant { 25829 v_0 := b.Controls[0] 25830 fc := auxIntToFlagConstant(v_0.AuxInt) 25831 if !(!fc.le()) { 25832 break 25833 } 25834 b.Reset(BlockFirst) 25835 b.swapSuccessors() 25836 return true 25837 } 25838 // match: (LE (InvertFlags cmp) yes no) 25839 // result: (GE cmp yes 
no) 25840 for b.Controls[0].Op == OpARM64InvertFlags { 25841 v_0 := b.Controls[0] 25842 cmp := v_0.Args[0] 25843 b.resetWithControl(BlockARM64GE, cmp) 25844 return true 25845 } 25846 case BlockARM64LEnoov: 25847 // match: (LEnoov (FlagConstant [fc]) yes no) 25848 // cond: fc.leNoov() 25849 // result: (First yes no) 25850 for b.Controls[0].Op == OpARM64FlagConstant { 25851 v_0 := b.Controls[0] 25852 fc := auxIntToFlagConstant(v_0.AuxInt) 25853 if !(fc.leNoov()) { 25854 break 25855 } 25856 b.Reset(BlockFirst) 25857 return true 25858 } 25859 // match: (LEnoov (FlagConstant [fc]) yes no) 25860 // cond: !fc.leNoov() 25861 // result: (First no yes) 25862 for b.Controls[0].Op == OpARM64FlagConstant { 25863 v_0 := b.Controls[0] 25864 fc := auxIntToFlagConstant(v_0.AuxInt) 25865 if !(!fc.leNoov()) { 25866 break 25867 } 25868 b.Reset(BlockFirst) 25869 b.swapSuccessors() 25870 return true 25871 } 25872 // match: (LEnoov (InvertFlags cmp) yes no) 25873 // result: (GEnoov cmp yes no) 25874 for b.Controls[0].Op == OpARM64InvertFlags { 25875 v_0 := b.Controls[0] 25876 cmp := v_0.Args[0] 25877 b.resetWithControl(BlockARM64GEnoov, cmp) 25878 return true 25879 } 25880 case BlockARM64LT: 25881 // match: (LT (CMPconst [0] z:(AND x y)) yes no) 25882 // cond: z.Uses == 1 25883 // result: (LT (TST x y) yes no) 25884 for b.Controls[0].Op == OpARM64CMPconst { 25885 v_0 := b.Controls[0] 25886 if auxIntToInt64(v_0.AuxInt) != 0 { 25887 break 25888 } 25889 z := v_0.Args[0] 25890 if z.Op != OpARM64AND { 25891 break 25892 } 25893 _ = z.Args[1] 25894 z_0 := z.Args[0] 25895 z_1 := z.Args[1] 25896 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25897 x := z_0 25898 y := z_1 25899 if !(z.Uses == 1) { 25900 continue 25901 } 25902 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 25903 v0.AddArg2(x, y) 25904 b.resetWithControl(BlockARM64LT, v0) 25905 return true 25906 } 25907 break 25908 } 25909 // match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no) 25910 // cond: x.Uses == 1 25911 // 
result: (LT (TSTconst [c] y) yes no) 25912 for b.Controls[0].Op == OpARM64CMPconst { 25913 v_0 := b.Controls[0] 25914 if auxIntToInt64(v_0.AuxInt) != 0 { 25915 break 25916 } 25917 x := v_0.Args[0] 25918 if x.Op != OpARM64ANDconst { 25919 break 25920 } 25921 c := auxIntToInt64(x.AuxInt) 25922 y := x.Args[0] 25923 if !(x.Uses == 1) { 25924 break 25925 } 25926 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 25927 v0.AuxInt = int64ToAuxInt(c) 25928 v0.AddArg(y) 25929 b.resetWithControl(BlockARM64LT, v0) 25930 return true 25931 } 25932 // match: (LT (CMPWconst [0] z:(AND x y)) yes no) 25933 // cond: z.Uses == 1 25934 // result: (LT (TSTW x y) yes no) 25935 for b.Controls[0].Op == OpARM64CMPWconst { 25936 v_0 := b.Controls[0] 25937 if auxIntToInt32(v_0.AuxInt) != 0 { 25938 break 25939 } 25940 z := v_0.Args[0] 25941 if z.Op != OpARM64AND { 25942 break 25943 } 25944 _ = z.Args[1] 25945 z_0 := z.Args[0] 25946 z_1 := z.Args[1] 25947 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 25948 x := z_0 25949 y := z_1 25950 if !(z.Uses == 1) { 25951 continue 25952 } 25953 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 25954 v0.AddArg2(x, y) 25955 b.resetWithControl(BlockARM64LT, v0) 25956 return true 25957 } 25958 break 25959 } 25960 // match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 25961 // cond: x.Uses == 1 25962 // result: (LT (TSTWconst [int32(c)] y) yes no) 25963 for b.Controls[0].Op == OpARM64CMPWconst { 25964 v_0 := b.Controls[0] 25965 if auxIntToInt32(v_0.AuxInt) != 0 { 25966 break 25967 } 25968 x := v_0.Args[0] 25969 if x.Op != OpARM64ANDconst { 25970 break 25971 } 25972 c := auxIntToInt64(x.AuxInt) 25973 y := x.Args[0] 25974 if !(x.Uses == 1) { 25975 break 25976 } 25977 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 25978 v0.AuxInt = int32ToAuxInt(int32(c)) 25979 v0.AddArg(y) 25980 b.resetWithControl(BlockARM64LT, v0) 25981 return true 25982 } 25983 // match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no) 25984 // cond: 
x.Uses == 1 25985 // result: (LTnoov (CMNconst [c] y) yes no) 25986 for b.Controls[0].Op == OpARM64CMPconst { 25987 v_0 := b.Controls[0] 25988 if auxIntToInt64(v_0.AuxInt) != 0 { 25989 break 25990 } 25991 x := v_0.Args[0] 25992 if x.Op != OpARM64ADDconst { 25993 break 25994 } 25995 c := auxIntToInt64(x.AuxInt) 25996 y := x.Args[0] 25997 if !(x.Uses == 1) { 25998 break 25999 } 26000 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 26001 v0.AuxInt = int64ToAuxInt(c) 26002 v0.AddArg(y) 26003 b.resetWithControl(BlockARM64LTnoov, v0) 26004 return true 26005 } 26006 // match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 26007 // cond: x.Uses == 1 26008 // result: (LTnoov (CMNWconst [int32(c)] y) yes no) 26009 for b.Controls[0].Op == OpARM64CMPWconst { 26010 v_0 := b.Controls[0] 26011 if auxIntToInt32(v_0.AuxInt) != 0 { 26012 break 26013 } 26014 x := v_0.Args[0] 26015 if x.Op != OpARM64ADDconst { 26016 break 26017 } 26018 c := auxIntToInt64(x.AuxInt) 26019 y := x.Args[0] 26020 if !(x.Uses == 1) { 26021 break 26022 } 26023 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 26024 v0.AuxInt = int32ToAuxInt(int32(c)) 26025 v0.AddArg(y) 26026 b.resetWithControl(BlockARM64LTnoov, v0) 26027 return true 26028 } 26029 // match: (LT (CMPconst [0] z:(ADD x y)) yes no) 26030 // cond: z.Uses == 1 26031 // result: (LTnoov (CMN x y) yes no) 26032 for b.Controls[0].Op == OpARM64CMPconst { 26033 v_0 := b.Controls[0] 26034 if auxIntToInt64(v_0.AuxInt) != 0 { 26035 break 26036 } 26037 z := v_0.Args[0] 26038 if z.Op != OpARM64ADD { 26039 break 26040 } 26041 _ = z.Args[1] 26042 z_0 := z.Args[0] 26043 z_1 := z.Args[1] 26044 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26045 x := z_0 26046 y := z_1 26047 if !(z.Uses == 1) { 26048 continue 26049 } 26050 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 26051 v0.AddArg2(x, y) 26052 b.resetWithControl(BlockARM64LTnoov, v0) 26053 return true 26054 } 26055 break 26056 } 26057 // match: (LT (CMPWconst [0] 
z:(ADD x y)) yes no) 26058 // cond: z.Uses == 1 26059 // result: (LTnoov (CMNW x y) yes no) 26060 for b.Controls[0].Op == OpARM64CMPWconst { 26061 v_0 := b.Controls[0] 26062 if auxIntToInt32(v_0.AuxInt) != 0 { 26063 break 26064 } 26065 z := v_0.Args[0] 26066 if z.Op != OpARM64ADD { 26067 break 26068 } 26069 _ = z.Args[1] 26070 z_0 := z.Args[0] 26071 z_1 := z.Args[1] 26072 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26073 x := z_0 26074 y := z_1 26075 if !(z.Uses == 1) { 26076 continue 26077 } 26078 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 26079 v0.AddArg2(x, y) 26080 b.resetWithControl(BlockARM64LTnoov, v0) 26081 return true 26082 } 26083 break 26084 } 26085 // match: (LT (CMPconst [0] z:(MADD a x y)) yes no) 26086 // cond: z.Uses==1 26087 // result: (LTnoov (CMN a (MUL <x.Type> x y)) yes no) 26088 for b.Controls[0].Op == OpARM64CMPconst { 26089 v_0 := b.Controls[0] 26090 if auxIntToInt64(v_0.AuxInt) != 0 { 26091 break 26092 } 26093 z := v_0.Args[0] 26094 if z.Op != OpARM64MADD { 26095 break 26096 } 26097 y := z.Args[2] 26098 a := z.Args[0] 26099 x := z.Args[1] 26100 if !(z.Uses == 1) { 26101 break 26102 } 26103 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 26104 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 26105 v1.AddArg2(x, y) 26106 v0.AddArg2(a, v1) 26107 b.resetWithControl(BlockARM64LTnoov, v0) 26108 return true 26109 } 26110 // match: (LT (CMPconst [0] z:(MSUB a x y)) yes no) 26111 // cond: z.Uses==1 26112 // result: (LTnoov (CMP a (MUL <x.Type> x y)) yes no) 26113 for b.Controls[0].Op == OpARM64CMPconst { 26114 v_0 := b.Controls[0] 26115 if auxIntToInt64(v_0.AuxInt) != 0 { 26116 break 26117 } 26118 z := v_0.Args[0] 26119 if z.Op != OpARM64MSUB { 26120 break 26121 } 26122 y := z.Args[2] 26123 a := z.Args[0] 26124 x := z.Args[1] 26125 if !(z.Uses == 1) { 26126 break 26127 } 26128 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 26129 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 26130 v1.AddArg2(x, y) 26131 
v0.AddArg2(a, v1) 26132 b.resetWithControl(BlockARM64LTnoov, v0) 26133 return true 26134 } 26135 // match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no) 26136 // cond: z.Uses==1 26137 // result: (LTnoov (CMNW a (MULW <x.Type> x y)) yes no) 26138 for b.Controls[0].Op == OpARM64CMPWconst { 26139 v_0 := b.Controls[0] 26140 if auxIntToInt32(v_0.AuxInt) != 0 { 26141 break 26142 } 26143 z := v_0.Args[0] 26144 if z.Op != OpARM64MADDW { 26145 break 26146 } 26147 y := z.Args[2] 26148 a := z.Args[0] 26149 x := z.Args[1] 26150 if !(z.Uses == 1) { 26151 break 26152 } 26153 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 26154 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 26155 v1.AddArg2(x, y) 26156 v0.AddArg2(a, v1) 26157 b.resetWithControl(BlockARM64LTnoov, v0) 26158 return true 26159 } 26160 // match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no) 26161 // cond: z.Uses==1 26162 // result: (LTnoov (CMPW a (MULW <x.Type> x y)) yes no) 26163 for b.Controls[0].Op == OpARM64CMPWconst { 26164 v_0 := b.Controls[0] 26165 if auxIntToInt32(v_0.AuxInt) != 0 { 26166 break 26167 } 26168 z := v_0.Args[0] 26169 if z.Op != OpARM64MSUBW { 26170 break 26171 } 26172 y := z.Args[2] 26173 a := z.Args[0] 26174 x := z.Args[1] 26175 if !(z.Uses == 1) { 26176 break 26177 } 26178 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 26179 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 26180 v1.AddArg2(x, y) 26181 v0.AddArg2(a, v1) 26182 b.resetWithControl(BlockARM64LTnoov, v0) 26183 return true 26184 } 26185 // match: (LT (CMPWconst [0] x) yes no) 26186 // result: (TBNZ [31] x yes no) 26187 for b.Controls[0].Op == OpARM64CMPWconst { 26188 v_0 := b.Controls[0] 26189 if auxIntToInt32(v_0.AuxInt) != 0 { 26190 break 26191 } 26192 x := v_0.Args[0] 26193 b.resetWithControl(BlockARM64TBNZ, x) 26194 b.AuxInt = int64ToAuxInt(31) 26195 return true 26196 } 26197 // match: (LT (CMPconst [0] x) yes no) 26198 // result: (TBNZ [63] x yes no) 26199 for b.Controls[0].Op == OpARM64CMPconst { 26200 v_0 := 
b.Controls[0] 26201 if auxIntToInt64(v_0.AuxInt) != 0 { 26202 break 26203 } 26204 x := v_0.Args[0] 26205 b.resetWithControl(BlockARM64TBNZ, x) 26206 b.AuxInt = int64ToAuxInt(63) 26207 return true 26208 } 26209 // match: (LT (FlagConstant [fc]) yes no) 26210 // cond: fc.lt() 26211 // result: (First yes no) 26212 for b.Controls[0].Op == OpARM64FlagConstant { 26213 v_0 := b.Controls[0] 26214 fc := auxIntToFlagConstant(v_0.AuxInt) 26215 if !(fc.lt()) { 26216 break 26217 } 26218 b.Reset(BlockFirst) 26219 return true 26220 } 26221 // match: (LT (FlagConstant [fc]) yes no) 26222 // cond: !fc.lt() 26223 // result: (First no yes) 26224 for b.Controls[0].Op == OpARM64FlagConstant { 26225 v_0 := b.Controls[0] 26226 fc := auxIntToFlagConstant(v_0.AuxInt) 26227 if !(!fc.lt()) { 26228 break 26229 } 26230 b.Reset(BlockFirst) 26231 b.swapSuccessors() 26232 return true 26233 } 26234 // match: (LT (InvertFlags cmp) yes no) 26235 // result: (GT cmp yes no) 26236 for b.Controls[0].Op == OpARM64InvertFlags { 26237 v_0 := b.Controls[0] 26238 cmp := v_0.Args[0] 26239 b.resetWithControl(BlockARM64GT, cmp) 26240 return true 26241 } 26242 case BlockARM64LTnoov: 26243 // match: (LTnoov (FlagConstant [fc]) yes no) 26244 // cond: fc.ltNoov() 26245 // result: (First yes no) 26246 for b.Controls[0].Op == OpARM64FlagConstant { 26247 v_0 := b.Controls[0] 26248 fc := auxIntToFlagConstant(v_0.AuxInt) 26249 if !(fc.ltNoov()) { 26250 break 26251 } 26252 b.Reset(BlockFirst) 26253 return true 26254 } 26255 // match: (LTnoov (FlagConstant [fc]) yes no) 26256 // cond: !fc.ltNoov() 26257 // result: (First no yes) 26258 for b.Controls[0].Op == OpARM64FlagConstant { 26259 v_0 := b.Controls[0] 26260 fc := auxIntToFlagConstant(v_0.AuxInt) 26261 if !(!fc.ltNoov()) { 26262 break 26263 } 26264 b.Reset(BlockFirst) 26265 b.swapSuccessors() 26266 return true 26267 } 26268 // match: (LTnoov (InvertFlags cmp) yes no) 26269 // result: (GTnoov cmp yes no) 26270 for b.Controls[0].Op == OpARM64InvertFlags { 26271 v_0 := 
b.Controls[0] 26272 cmp := v_0.Args[0] 26273 b.resetWithControl(BlockARM64GTnoov, cmp) 26274 return true 26275 } 26276 case BlockARM64NE: 26277 // match: (NE (CMPconst [0] z:(AND x y)) yes no) 26278 // cond: z.Uses == 1 26279 // result: (NE (TST x y) yes no) 26280 for b.Controls[0].Op == OpARM64CMPconst { 26281 v_0 := b.Controls[0] 26282 if auxIntToInt64(v_0.AuxInt) != 0 { 26283 break 26284 } 26285 z := v_0.Args[0] 26286 if z.Op != OpARM64AND { 26287 break 26288 } 26289 _ = z.Args[1] 26290 z_0 := z.Args[0] 26291 z_1 := z.Args[1] 26292 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26293 x := z_0 26294 y := z_1 26295 if !(z.Uses == 1) { 26296 continue 26297 } 26298 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 26299 v0.AddArg2(x, y) 26300 b.resetWithControl(BlockARM64NE, v0) 26301 return true 26302 } 26303 break 26304 } 26305 // match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no) 26306 // cond: x.Uses == 1 26307 // result: (NE (TSTconst [c] y) yes no) 26308 for b.Controls[0].Op == OpARM64CMPconst { 26309 v_0 := b.Controls[0] 26310 if auxIntToInt64(v_0.AuxInt) != 0 { 26311 break 26312 } 26313 x := v_0.Args[0] 26314 if x.Op != OpARM64ANDconst { 26315 break 26316 } 26317 c := auxIntToInt64(x.AuxInt) 26318 y := x.Args[0] 26319 if !(x.Uses == 1) { 26320 break 26321 } 26322 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 26323 v0.AuxInt = int64ToAuxInt(c) 26324 v0.AddArg(y) 26325 b.resetWithControl(BlockARM64NE, v0) 26326 return true 26327 } 26328 // match: (NE (CMPWconst [0] z:(AND x y)) yes no) 26329 // cond: z.Uses == 1 26330 // result: (NE (TSTW x y) yes no) 26331 for b.Controls[0].Op == OpARM64CMPWconst { 26332 v_0 := b.Controls[0] 26333 if auxIntToInt32(v_0.AuxInt) != 0 { 26334 break 26335 } 26336 z := v_0.Args[0] 26337 if z.Op != OpARM64AND { 26338 break 26339 } 26340 _ = z.Args[1] 26341 z_0 := z.Args[0] 26342 z_1 := z.Args[1] 26343 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26344 x := z_0 26345 y := z_1 26346 if 
!(z.Uses == 1) { 26347 continue 26348 } 26349 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 26350 v0.AddArg2(x, y) 26351 b.resetWithControl(BlockARM64NE, v0) 26352 return true 26353 } 26354 break 26355 } 26356 // match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 26357 // cond: x.Uses == 1 26358 // result: (NE (TSTWconst [int32(c)] y) yes no) 26359 for b.Controls[0].Op == OpARM64CMPWconst { 26360 v_0 := b.Controls[0] 26361 if auxIntToInt32(v_0.AuxInt) != 0 { 26362 break 26363 } 26364 x := v_0.Args[0] 26365 if x.Op != OpARM64ANDconst { 26366 break 26367 } 26368 c := auxIntToInt64(x.AuxInt) 26369 y := x.Args[0] 26370 if !(x.Uses == 1) { 26371 break 26372 } 26373 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 26374 v0.AuxInt = int32ToAuxInt(int32(c)) 26375 v0.AddArg(y) 26376 b.resetWithControl(BlockARM64NE, v0) 26377 return true 26378 } 26379 // match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no) 26380 // cond: x.Uses == 1 26381 // result: (NE (CMNconst [c] y) yes no) 26382 for b.Controls[0].Op == OpARM64CMPconst { 26383 v_0 := b.Controls[0] 26384 if auxIntToInt64(v_0.AuxInt) != 0 { 26385 break 26386 } 26387 x := v_0.Args[0] 26388 if x.Op != OpARM64ADDconst { 26389 break 26390 } 26391 c := auxIntToInt64(x.AuxInt) 26392 y := x.Args[0] 26393 if !(x.Uses == 1) { 26394 break 26395 } 26396 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 26397 v0.AuxInt = int64ToAuxInt(c) 26398 v0.AddArg(y) 26399 b.resetWithControl(BlockARM64NE, v0) 26400 return true 26401 } 26402 // match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 26403 // cond: x.Uses == 1 26404 // result: (NE (CMNWconst [int32(c)] y) yes no) 26405 for b.Controls[0].Op == OpARM64CMPWconst { 26406 v_0 := b.Controls[0] 26407 if auxIntToInt32(v_0.AuxInt) != 0 { 26408 break 26409 } 26410 x := v_0.Args[0] 26411 if x.Op != OpARM64ADDconst { 26412 break 26413 } 26414 c := auxIntToInt64(x.AuxInt) 26415 y := x.Args[0] 26416 if !(x.Uses == 1) { 26417 break 26418 } 26419 v0 := 
b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 26420 v0.AuxInt = int32ToAuxInt(int32(c)) 26421 v0.AddArg(y) 26422 b.resetWithControl(BlockARM64NE, v0) 26423 return true 26424 } 26425 // match: (NE (CMPconst [0] z:(ADD x y)) yes no) 26426 // cond: z.Uses == 1 26427 // result: (NE (CMN x y) yes no) 26428 for b.Controls[0].Op == OpARM64CMPconst { 26429 v_0 := b.Controls[0] 26430 if auxIntToInt64(v_0.AuxInt) != 0 { 26431 break 26432 } 26433 z := v_0.Args[0] 26434 if z.Op != OpARM64ADD { 26435 break 26436 } 26437 _ = z.Args[1] 26438 z_0 := z.Args[0] 26439 z_1 := z.Args[1] 26440 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26441 x := z_0 26442 y := z_1 26443 if !(z.Uses == 1) { 26444 continue 26445 } 26446 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 26447 v0.AddArg2(x, y) 26448 b.resetWithControl(BlockARM64NE, v0) 26449 return true 26450 } 26451 break 26452 } 26453 // match: (NE (CMPWconst [0] z:(ADD x y)) yes no) 26454 // cond: z.Uses == 1 26455 // result: (NE (CMNW x y) yes no) 26456 for b.Controls[0].Op == OpARM64CMPWconst { 26457 v_0 := b.Controls[0] 26458 if auxIntToInt32(v_0.AuxInt) != 0 { 26459 break 26460 } 26461 z := v_0.Args[0] 26462 if z.Op != OpARM64ADD { 26463 break 26464 } 26465 _ = z.Args[1] 26466 z_0 := z.Args[0] 26467 z_1 := z.Args[1] 26468 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 { 26469 x := z_0 26470 y := z_1 26471 if !(z.Uses == 1) { 26472 continue 26473 } 26474 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 26475 v0.AddArg2(x, y) 26476 b.resetWithControl(BlockARM64NE, v0) 26477 return true 26478 } 26479 break 26480 } 26481 // match: (NE (CMP x z:(NEG y)) yes no) 26482 // cond: z.Uses == 1 26483 // result: (NE (CMN x y) yes no) 26484 for b.Controls[0].Op == OpARM64CMP { 26485 v_0 := b.Controls[0] 26486 _ = v_0.Args[1] 26487 x := v_0.Args[0] 26488 z := v_0.Args[1] 26489 if z.Op != OpARM64NEG { 26490 break 26491 } 26492 y := z.Args[0] 26493 if !(z.Uses == 1) { 26494 break 26495 } 26496 v0 := 
b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 26497 v0.AddArg2(x, y) 26498 b.resetWithControl(BlockARM64NE, v0) 26499 return true 26500 } 26501 // match: (NE (CMPW x z:(NEG y)) yes no) 26502 // cond: z.Uses == 1 26503 // result: (NE (CMNW x y) yes no) 26504 for b.Controls[0].Op == OpARM64CMPW { 26505 v_0 := b.Controls[0] 26506 _ = v_0.Args[1] 26507 x := v_0.Args[0] 26508 z := v_0.Args[1] 26509 if z.Op != OpARM64NEG { 26510 break 26511 } 26512 y := z.Args[0] 26513 if !(z.Uses == 1) { 26514 break 26515 } 26516 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 26517 v0.AddArg2(x, y) 26518 b.resetWithControl(BlockARM64NE, v0) 26519 return true 26520 } 26521 // match: (NE (CMPconst [0] x) yes no) 26522 // result: (NZ x yes no) 26523 for b.Controls[0].Op == OpARM64CMPconst { 26524 v_0 := b.Controls[0] 26525 if auxIntToInt64(v_0.AuxInt) != 0 { 26526 break 26527 } 26528 x := v_0.Args[0] 26529 b.resetWithControl(BlockARM64NZ, x) 26530 return true 26531 } 26532 // match: (NE (CMPWconst [0] x) yes no) 26533 // result: (NZW x yes no) 26534 for b.Controls[0].Op == OpARM64CMPWconst { 26535 v_0 := b.Controls[0] 26536 if auxIntToInt32(v_0.AuxInt) != 0 { 26537 break 26538 } 26539 x := v_0.Args[0] 26540 b.resetWithControl(BlockARM64NZW, x) 26541 return true 26542 } 26543 // match: (NE (CMPconst [0] z:(MADD a x y)) yes no) 26544 // cond: z.Uses==1 26545 // result: (NE (CMN a (MUL <x.Type> x y)) yes no) 26546 for b.Controls[0].Op == OpARM64CMPconst { 26547 v_0 := b.Controls[0] 26548 if auxIntToInt64(v_0.AuxInt) != 0 { 26549 break 26550 } 26551 z := v_0.Args[0] 26552 if z.Op != OpARM64MADD { 26553 break 26554 } 26555 y := z.Args[2] 26556 a := z.Args[0] 26557 x := z.Args[1] 26558 if !(z.Uses == 1) { 26559 break 26560 } 26561 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 26562 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 26563 v1.AddArg2(x, y) 26564 v0.AddArg2(a, v1) 26565 b.resetWithControl(BlockARM64NE, v0) 26566 return true 26567 } 26568 // match: (NE (CMPconst 
[0] z:(MSUB a x y)) yes no) 26569 // cond: z.Uses==1 26570 // result: (NE (CMP a (MUL <x.Type> x y)) yes no) 26571 for b.Controls[0].Op == OpARM64CMPconst { 26572 v_0 := b.Controls[0] 26573 if auxIntToInt64(v_0.AuxInt) != 0 { 26574 break 26575 } 26576 z := v_0.Args[0] 26577 if z.Op != OpARM64MSUB { 26578 break 26579 } 26580 y := z.Args[2] 26581 a := z.Args[0] 26582 x := z.Args[1] 26583 if !(z.Uses == 1) { 26584 break 26585 } 26586 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 26587 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 26588 v1.AddArg2(x, y) 26589 v0.AddArg2(a, v1) 26590 b.resetWithControl(BlockARM64NE, v0) 26591 return true 26592 } 26593 // match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no) 26594 // cond: z.Uses==1 26595 // result: (NE (CMNW a (MULW <x.Type> x y)) yes no) 26596 for b.Controls[0].Op == OpARM64CMPWconst { 26597 v_0 := b.Controls[0] 26598 if auxIntToInt32(v_0.AuxInt) != 0 { 26599 break 26600 } 26601 z := v_0.Args[0] 26602 if z.Op != OpARM64MADDW { 26603 break 26604 } 26605 y := z.Args[2] 26606 a := z.Args[0] 26607 x := z.Args[1] 26608 if !(z.Uses == 1) { 26609 break 26610 } 26611 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 26612 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 26613 v1.AddArg2(x, y) 26614 v0.AddArg2(a, v1) 26615 b.resetWithControl(BlockARM64NE, v0) 26616 return true 26617 } 26618 // match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no) 26619 // cond: z.Uses==1 26620 // result: (NE (CMPW a (MULW <x.Type> x y)) yes no) 26621 for b.Controls[0].Op == OpARM64CMPWconst { 26622 v_0 := b.Controls[0] 26623 if auxIntToInt32(v_0.AuxInt) != 0 { 26624 break 26625 } 26626 z := v_0.Args[0] 26627 if z.Op != OpARM64MSUBW { 26628 break 26629 } 26630 y := z.Args[2] 26631 a := z.Args[0] 26632 x := z.Args[1] 26633 if !(z.Uses == 1) { 26634 break 26635 } 26636 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 26637 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 26638 v1.AddArg2(x, y) 26639 v0.AddArg2(a, v1) 26640 
b.resetWithControl(BlockARM64NE, v0) 26641 return true 26642 } 26643 // match: (NE (TSTconst [c] x) yes no) 26644 // cond: oneBit(c) 26645 // result: (TBNZ [int64(ntz64(c))] x yes no) 26646 for b.Controls[0].Op == OpARM64TSTconst { 26647 v_0 := b.Controls[0] 26648 c := auxIntToInt64(v_0.AuxInt) 26649 x := v_0.Args[0] 26650 if !(oneBit(c)) { 26651 break 26652 } 26653 b.resetWithControl(BlockARM64TBNZ, x) 26654 b.AuxInt = int64ToAuxInt(int64(ntz64(c))) 26655 return true 26656 } 26657 // match: (NE (TSTWconst [c] x) yes no) 26658 // cond: oneBit(int64(uint32(c))) 26659 // result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no) 26660 for b.Controls[0].Op == OpARM64TSTWconst { 26661 v_0 := b.Controls[0] 26662 c := auxIntToInt32(v_0.AuxInt) 26663 x := v_0.Args[0] 26664 if !(oneBit(int64(uint32(c)))) { 26665 break 26666 } 26667 b.resetWithControl(BlockARM64TBNZ, x) 26668 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c))))) 26669 return true 26670 } 26671 // match: (NE (FlagConstant [fc]) yes no) 26672 // cond: fc.ne() 26673 // result: (First yes no) 26674 for b.Controls[0].Op == OpARM64FlagConstant { 26675 v_0 := b.Controls[0] 26676 fc := auxIntToFlagConstant(v_0.AuxInt) 26677 if !(fc.ne()) { 26678 break 26679 } 26680 b.Reset(BlockFirst) 26681 return true 26682 } 26683 // match: (NE (FlagConstant [fc]) yes no) 26684 // cond: !fc.ne() 26685 // result: (First no yes) 26686 for b.Controls[0].Op == OpARM64FlagConstant { 26687 v_0 := b.Controls[0] 26688 fc := auxIntToFlagConstant(v_0.AuxInt) 26689 if !(!fc.ne()) { 26690 break 26691 } 26692 b.Reset(BlockFirst) 26693 b.swapSuccessors() 26694 return true 26695 } 26696 // match: (NE (InvertFlags cmp) yes no) 26697 // result: (NE cmp yes no) 26698 for b.Controls[0].Op == OpARM64InvertFlags { 26699 v_0 := b.Controls[0] 26700 cmp := v_0.Args[0] 26701 b.resetWithControl(BlockARM64NE, cmp) 26702 return true 26703 } 26704 case BlockARM64NZ: 26705 // match: (NZ (Equal cc) yes no) 26706 // result: (EQ cc yes no) 26707 for 
b.Controls[0].Op == OpARM64Equal { 26708 v_0 := b.Controls[0] 26709 cc := v_0.Args[0] 26710 b.resetWithControl(BlockARM64EQ, cc) 26711 return true 26712 } 26713 // match: (NZ (NotEqual cc) yes no) 26714 // result: (NE cc yes no) 26715 for b.Controls[0].Op == OpARM64NotEqual { 26716 v_0 := b.Controls[0] 26717 cc := v_0.Args[0] 26718 b.resetWithControl(BlockARM64NE, cc) 26719 return true 26720 } 26721 // match: (NZ (LessThan cc) yes no) 26722 // result: (LT cc yes no) 26723 for b.Controls[0].Op == OpARM64LessThan { 26724 v_0 := b.Controls[0] 26725 cc := v_0.Args[0] 26726 b.resetWithControl(BlockARM64LT, cc) 26727 return true 26728 } 26729 // match: (NZ (LessThanU cc) yes no) 26730 // result: (ULT cc yes no) 26731 for b.Controls[0].Op == OpARM64LessThanU { 26732 v_0 := b.Controls[0] 26733 cc := v_0.Args[0] 26734 b.resetWithControl(BlockARM64ULT, cc) 26735 return true 26736 } 26737 // match: (NZ (LessEqual cc) yes no) 26738 // result: (LE cc yes no) 26739 for b.Controls[0].Op == OpARM64LessEqual { 26740 v_0 := b.Controls[0] 26741 cc := v_0.Args[0] 26742 b.resetWithControl(BlockARM64LE, cc) 26743 return true 26744 } 26745 // match: (NZ (LessEqualU cc) yes no) 26746 // result: (ULE cc yes no) 26747 for b.Controls[0].Op == OpARM64LessEqualU { 26748 v_0 := b.Controls[0] 26749 cc := v_0.Args[0] 26750 b.resetWithControl(BlockARM64ULE, cc) 26751 return true 26752 } 26753 // match: (NZ (GreaterThan cc) yes no) 26754 // result: (GT cc yes no) 26755 for b.Controls[0].Op == OpARM64GreaterThan { 26756 v_0 := b.Controls[0] 26757 cc := v_0.Args[0] 26758 b.resetWithControl(BlockARM64GT, cc) 26759 return true 26760 } 26761 // match: (NZ (GreaterThanU cc) yes no) 26762 // result: (UGT cc yes no) 26763 for b.Controls[0].Op == OpARM64GreaterThanU { 26764 v_0 := b.Controls[0] 26765 cc := v_0.Args[0] 26766 b.resetWithControl(BlockARM64UGT, cc) 26767 return true 26768 } 26769 // match: (NZ (GreaterEqual cc) yes no) 26770 // result: (GE cc yes no) 26771 for b.Controls[0].Op == 
OpARM64GreaterEqual { 26772 v_0 := b.Controls[0] 26773 cc := v_0.Args[0] 26774 b.resetWithControl(BlockARM64GE, cc) 26775 return true 26776 } 26777 // match: (NZ (GreaterEqualU cc) yes no) 26778 // result: (UGE cc yes no) 26779 for b.Controls[0].Op == OpARM64GreaterEqualU { 26780 v_0 := b.Controls[0] 26781 cc := v_0.Args[0] 26782 b.resetWithControl(BlockARM64UGE, cc) 26783 return true 26784 } 26785 // match: (NZ (LessThanF cc) yes no) 26786 // result: (FLT cc yes no) 26787 for b.Controls[0].Op == OpARM64LessThanF { 26788 v_0 := b.Controls[0] 26789 cc := v_0.Args[0] 26790 b.resetWithControl(BlockARM64FLT, cc) 26791 return true 26792 } 26793 // match: (NZ (LessEqualF cc) yes no) 26794 // result: (FLE cc yes no) 26795 for b.Controls[0].Op == OpARM64LessEqualF { 26796 v_0 := b.Controls[0] 26797 cc := v_0.Args[0] 26798 b.resetWithControl(BlockARM64FLE, cc) 26799 return true 26800 } 26801 // match: (NZ (GreaterThanF cc) yes no) 26802 // result: (FGT cc yes no) 26803 for b.Controls[0].Op == OpARM64GreaterThanF { 26804 v_0 := b.Controls[0] 26805 cc := v_0.Args[0] 26806 b.resetWithControl(BlockARM64FGT, cc) 26807 return true 26808 } 26809 // match: (NZ (GreaterEqualF cc) yes no) 26810 // result: (FGE cc yes no) 26811 for b.Controls[0].Op == OpARM64GreaterEqualF { 26812 v_0 := b.Controls[0] 26813 cc := v_0.Args[0] 26814 b.resetWithControl(BlockARM64FGE, cc) 26815 return true 26816 } 26817 // match: (NZ (ANDconst [c] x) yes no) 26818 // cond: oneBit(c) 26819 // result: (TBNZ [int64(ntz64(c))] x yes no) 26820 for b.Controls[0].Op == OpARM64ANDconst { 26821 v_0 := b.Controls[0] 26822 c := auxIntToInt64(v_0.AuxInt) 26823 x := v_0.Args[0] 26824 if !(oneBit(c)) { 26825 break 26826 } 26827 b.resetWithControl(BlockARM64TBNZ, x) 26828 b.AuxInt = int64ToAuxInt(int64(ntz64(c))) 26829 return true 26830 } 26831 // match: (NZ (MOVDconst [0]) yes no) 26832 // result: (First no yes) 26833 for b.Controls[0].Op == OpARM64MOVDconst { 26834 v_0 := b.Controls[0] 26835 if 
auxIntToInt64(v_0.AuxInt) != 0 { 26836 break 26837 } 26838 b.Reset(BlockFirst) 26839 b.swapSuccessors() 26840 return true 26841 } 26842 // match: (NZ (MOVDconst [c]) yes no) 26843 // cond: c != 0 26844 // result: (First yes no) 26845 for b.Controls[0].Op == OpARM64MOVDconst { 26846 v_0 := b.Controls[0] 26847 c := auxIntToInt64(v_0.AuxInt) 26848 if !(c != 0) { 26849 break 26850 } 26851 b.Reset(BlockFirst) 26852 return true 26853 } 26854 case BlockARM64NZW: 26855 // match: (NZW (ANDconst [c] x) yes no) 26856 // cond: oneBit(int64(uint32(c))) 26857 // result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no) 26858 for b.Controls[0].Op == OpARM64ANDconst { 26859 v_0 := b.Controls[0] 26860 c := auxIntToInt64(v_0.AuxInt) 26861 x := v_0.Args[0] 26862 if !(oneBit(int64(uint32(c)))) { 26863 break 26864 } 26865 b.resetWithControl(BlockARM64TBNZ, x) 26866 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c))))) 26867 return true 26868 } 26869 // match: (NZW (MOVDconst [c]) yes no) 26870 // cond: int32(c) == 0 26871 // result: (First no yes) 26872 for b.Controls[0].Op == OpARM64MOVDconst { 26873 v_0 := b.Controls[0] 26874 c := auxIntToInt64(v_0.AuxInt) 26875 if !(int32(c) == 0) { 26876 break 26877 } 26878 b.Reset(BlockFirst) 26879 b.swapSuccessors() 26880 return true 26881 } 26882 // match: (NZW (MOVDconst [c]) yes no) 26883 // cond: int32(c) != 0 26884 // result: (First yes no) 26885 for b.Controls[0].Op == OpARM64MOVDconst { 26886 v_0 := b.Controls[0] 26887 c := auxIntToInt64(v_0.AuxInt) 26888 if !(int32(c) != 0) { 26889 break 26890 } 26891 b.Reset(BlockFirst) 26892 return true 26893 } 26894 case BlockARM64TBNZ: 26895 // match: (TBNZ [0] (Equal cc) yes no) 26896 // result: (EQ cc yes no) 26897 for b.Controls[0].Op == OpARM64Equal { 26898 v_0 := b.Controls[0] 26899 cc := v_0.Args[0] 26900 if auxIntToInt64(b.AuxInt) != 0 { 26901 break 26902 } 26903 b.resetWithControl(BlockARM64EQ, cc) 26904 return true 26905 } 26906 // match: (TBNZ [0] (NotEqual cc) yes no) 26907 // result: (NE 
cc yes no) 26908 for b.Controls[0].Op == OpARM64NotEqual { 26909 v_0 := b.Controls[0] 26910 cc := v_0.Args[0] 26911 if auxIntToInt64(b.AuxInt) != 0 { 26912 break 26913 } 26914 b.resetWithControl(BlockARM64NE, cc) 26915 return true 26916 } 26917 // match: (TBNZ [0] (LessThan cc) yes no) 26918 // result: (LT cc yes no) 26919 for b.Controls[0].Op == OpARM64LessThan { 26920 v_0 := b.Controls[0] 26921 cc := v_0.Args[0] 26922 if auxIntToInt64(b.AuxInt) != 0 { 26923 break 26924 } 26925 b.resetWithControl(BlockARM64LT, cc) 26926 return true 26927 } 26928 // match: (TBNZ [0] (LessThanU cc) yes no) 26929 // result: (ULT cc yes no) 26930 for b.Controls[0].Op == OpARM64LessThanU { 26931 v_0 := b.Controls[0] 26932 cc := v_0.Args[0] 26933 if auxIntToInt64(b.AuxInt) != 0 { 26934 break 26935 } 26936 b.resetWithControl(BlockARM64ULT, cc) 26937 return true 26938 } 26939 // match: (TBNZ [0] (LessEqual cc) yes no) 26940 // result: (LE cc yes no) 26941 for b.Controls[0].Op == OpARM64LessEqual { 26942 v_0 := b.Controls[0] 26943 cc := v_0.Args[0] 26944 if auxIntToInt64(b.AuxInt) != 0 { 26945 break 26946 } 26947 b.resetWithControl(BlockARM64LE, cc) 26948 return true 26949 } 26950 // match: (TBNZ [0] (LessEqualU cc) yes no) 26951 // result: (ULE cc yes no) 26952 for b.Controls[0].Op == OpARM64LessEqualU { 26953 v_0 := b.Controls[0] 26954 cc := v_0.Args[0] 26955 if auxIntToInt64(b.AuxInt) != 0 { 26956 break 26957 } 26958 b.resetWithControl(BlockARM64ULE, cc) 26959 return true 26960 } 26961 // match: (TBNZ [0] (GreaterThan cc) yes no) 26962 // result: (GT cc yes no) 26963 for b.Controls[0].Op == OpARM64GreaterThan { 26964 v_0 := b.Controls[0] 26965 cc := v_0.Args[0] 26966 if auxIntToInt64(b.AuxInt) != 0 { 26967 break 26968 } 26969 b.resetWithControl(BlockARM64GT, cc) 26970 return true 26971 } 26972 // match: (TBNZ [0] (GreaterThanU cc) yes no) 26973 // result: (UGT cc yes no) 26974 for b.Controls[0].Op == OpARM64GreaterThanU { 26975 v_0 := b.Controls[0] 26976 cc := v_0.Args[0] 26977 if 
auxIntToInt64(b.AuxInt) != 0 { 26978 break 26979 } 26980 b.resetWithControl(BlockARM64UGT, cc) 26981 return true 26982 } 26983 // match: (TBNZ [0] (GreaterEqual cc) yes no) 26984 // result: (GE cc yes no) 26985 for b.Controls[0].Op == OpARM64GreaterEqual { 26986 v_0 := b.Controls[0] 26987 cc := v_0.Args[0] 26988 if auxIntToInt64(b.AuxInt) != 0 { 26989 break 26990 } 26991 b.resetWithControl(BlockARM64GE, cc) 26992 return true 26993 } 26994 // match: (TBNZ [0] (GreaterEqualU cc) yes no) 26995 // result: (UGE cc yes no) 26996 for b.Controls[0].Op == OpARM64GreaterEqualU { 26997 v_0 := b.Controls[0] 26998 cc := v_0.Args[0] 26999 if auxIntToInt64(b.AuxInt) != 0 { 27000 break 27001 } 27002 b.resetWithControl(BlockARM64UGE, cc) 27003 return true 27004 } 27005 // match: (TBNZ [0] (LessThanF cc) yes no) 27006 // result: (FLT cc yes no) 27007 for b.Controls[0].Op == OpARM64LessThanF { 27008 v_0 := b.Controls[0] 27009 cc := v_0.Args[0] 27010 if auxIntToInt64(b.AuxInt) != 0 { 27011 break 27012 } 27013 b.resetWithControl(BlockARM64FLT, cc) 27014 return true 27015 } 27016 // match: (TBNZ [0] (LessEqualF cc) yes no) 27017 // result: (FLE cc yes no) 27018 for b.Controls[0].Op == OpARM64LessEqualF { 27019 v_0 := b.Controls[0] 27020 cc := v_0.Args[0] 27021 if auxIntToInt64(b.AuxInt) != 0 { 27022 break 27023 } 27024 b.resetWithControl(BlockARM64FLE, cc) 27025 return true 27026 } 27027 // match: (TBNZ [0] (GreaterThanF cc) yes no) 27028 // result: (FGT cc yes no) 27029 for b.Controls[0].Op == OpARM64GreaterThanF { 27030 v_0 := b.Controls[0] 27031 cc := v_0.Args[0] 27032 if auxIntToInt64(b.AuxInt) != 0 { 27033 break 27034 } 27035 b.resetWithControl(BlockARM64FGT, cc) 27036 return true 27037 } 27038 // match: (TBNZ [0] (GreaterEqualF cc) yes no) 27039 // result: (FGE cc yes no) 27040 for b.Controls[0].Op == OpARM64GreaterEqualF { 27041 v_0 := b.Controls[0] 27042 cc := v_0.Args[0] 27043 if auxIntToInt64(b.AuxInt) != 0 { 27044 break 27045 } 27046 b.resetWithControl(BlockARM64FGE, cc) 
27047 return true 27048 } 27049 case BlockARM64UGE: 27050 // match: (UGE (FlagConstant [fc]) yes no) 27051 // cond: fc.uge() 27052 // result: (First yes no) 27053 for b.Controls[0].Op == OpARM64FlagConstant { 27054 v_0 := b.Controls[0] 27055 fc := auxIntToFlagConstant(v_0.AuxInt) 27056 if !(fc.uge()) { 27057 break 27058 } 27059 b.Reset(BlockFirst) 27060 return true 27061 } 27062 // match: (UGE (FlagConstant [fc]) yes no) 27063 // cond: !fc.uge() 27064 // result: (First no yes) 27065 for b.Controls[0].Op == OpARM64FlagConstant { 27066 v_0 := b.Controls[0] 27067 fc := auxIntToFlagConstant(v_0.AuxInt) 27068 if !(!fc.uge()) { 27069 break 27070 } 27071 b.Reset(BlockFirst) 27072 b.swapSuccessors() 27073 return true 27074 } 27075 // match: (UGE (InvertFlags cmp) yes no) 27076 // result: (ULE cmp yes no) 27077 for b.Controls[0].Op == OpARM64InvertFlags { 27078 v_0 := b.Controls[0] 27079 cmp := v_0.Args[0] 27080 b.resetWithControl(BlockARM64ULE, cmp) 27081 return true 27082 } 27083 case BlockARM64UGT: 27084 // match: (UGT (FlagConstant [fc]) yes no) 27085 // cond: fc.ugt() 27086 // result: (First yes no) 27087 for b.Controls[0].Op == OpARM64FlagConstant { 27088 v_0 := b.Controls[0] 27089 fc := auxIntToFlagConstant(v_0.AuxInt) 27090 if !(fc.ugt()) { 27091 break 27092 } 27093 b.Reset(BlockFirst) 27094 return true 27095 } 27096 // match: (UGT (FlagConstant [fc]) yes no) 27097 // cond: !fc.ugt() 27098 // result: (First no yes) 27099 for b.Controls[0].Op == OpARM64FlagConstant { 27100 v_0 := b.Controls[0] 27101 fc := auxIntToFlagConstant(v_0.AuxInt) 27102 if !(!fc.ugt()) { 27103 break 27104 } 27105 b.Reset(BlockFirst) 27106 b.swapSuccessors() 27107 return true 27108 } 27109 // match: (UGT (InvertFlags cmp) yes no) 27110 // result: (ULT cmp yes no) 27111 for b.Controls[0].Op == OpARM64InvertFlags { 27112 v_0 := b.Controls[0] 27113 cmp := v_0.Args[0] 27114 b.resetWithControl(BlockARM64ULT, cmp) 27115 return true 27116 } 27117 case BlockARM64ULE: 27118 // match: (ULE (FlagConstant 
[fc]) yes no) 27119 // cond: fc.ule() 27120 // result: (First yes no) 27121 for b.Controls[0].Op == OpARM64FlagConstant { 27122 v_0 := b.Controls[0] 27123 fc := auxIntToFlagConstant(v_0.AuxInt) 27124 if !(fc.ule()) { 27125 break 27126 } 27127 b.Reset(BlockFirst) 27128 return true 27129 } 27130 // match: (ULE (FlagConstant [fc]) yes no) 27131 // cond: !fc.ule() 27132 // result: (First no yes) 27133 for b.Controls[0].Op == OpARM64FlagConstant { 27134 v_0 := b.Controls[0] 27135 fc := auxIntToFlagConstant(v_0.AuxInt) 27136 if !(!fc.ule()) { 27137 break 27138 } 27139 b.Reset(BlockFirst) 27140 b.swapSuccessors() 27141 return true 27142 } 27143 // match: (ULE (InvertFlags cmp) yes no) 27144 // result: (UGE cmp yes no) 27145 for b.Controls[0].Op == OpARM64InvertFlags { 27146 v_0 := b.Controls[0] 27147 cmp := v_0.Args[0] 27148 b.resetWithControl(BlockARM64UGE, cmp) 27149 return true 27150 } 27151 case BlockARM64ULT: 27152 // match: (ULT (FlagConstant [fc]) yes no) 27153 // cond: fc.ult() 27154 // result: (First yes no) 27155 for b.Controls[0].Op == OpARM64FlagConstant { 27156 v_0 := b.Controls[0] 27157 fc := auxIntToFlagConstant(v_0.AuxInt) 27158 if !(fc.ult()) { 27159 break 27160 } 27161 b.Reset(BlockFirst) 27162 return true 27163 } 27164 // match: (ULT (FlagConstant [fc]) yes no) 27165 // cond: !fc.ult() 27166 // result: (First no yes) 27167 for b.Controls[0].Op == OpARM64FlagConstant { 27168 v_0 := b.Controls[0] 27169 fc := auxIntToFlagConstant(v_0.AuxInt) 27170 if !(!fc.ult()) { 27171 break 27172 } 27173 b.Reset(BlockFirst) 27174 b.swapSuccessors() 27175 return true 27176 } 27177 // match: (ULT (InvertFlags cmp) yes no) 27178 // result: (UGT cmp yes no) 27179 for b.Controls[0].Op == OpARM64InvertFlags { 27180 v_0 := b.Controls[0] 27181 cmp := v_0.Args[0] 27182 b.resetWithControl(BlockARM64UGT, cmp) 27183 return true 27184 } 27185 case BlockARM64Z: 27186 // match: (Z (ANDconst [c] x) yes no) 27187 // cond: oneBit(c) 27188 // result: (TBZ [int64(ntz64(c))] x yes no) 27189 
for b.Controls[0].Op == OpARM64ANDconst { 27190 v_0 := b.Controls[0] 27191 c := auxIntToInt64(v_0.AuxInt) 27192 x := v_0.Args[0] 27193 if !(oneBit(c)) { 27194 break 27195 } 27196 b.resetWithControl(BlockARM64TBZ, x) 27197 b.AuxInt = int64ToAuxInt(int64(ntz64(c))) 27198 return true 27199 } 27200 // match: (Z (MOVDconst [0]) yes no) 27201 // result: (First yes no) 27202 for b.Controls[0].Op == OpARM64MOVDconst { 27203 v_0 := b.Controls[0] 27204 if auxIntToInt64(v_0.AuxInt) != 0 { 27205 break 27206 } 27207 b.Reset(BlockFirst) 27208 return true 27209 } 27210 // match: (Z (MOVDconst [c]) yes no) 27211 // cond: c != 0 27212 // result: (First no yes) 27213 for b.Controls[0].Op == OpARM64MOVDconst { 27214 v_0 := b.Controls[0] 27215 c := auxIntToInt64(v_0.AuxInt) 27216 if !(c != 0) { 27217 break 27218 } 27219 b.Reset(BlockFirst) 27220 b.swapSuccessors() 27221 return true 27222 } 27223 case BlockARM64ZW: 27224 // match: (ZW (ANDconst [c] x) yes no) 27225 // cond: oneBit(int64(uint32(c))) 27226 // result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no) 27227 for b.Controls[0].Op == OpARM64ANDconst { 27228 v_0 := b.Controls[0] 27229 c := auxIntToInt64(v_0.AuxInt) 27230 x := v_0.Args[0] 27231 if !(oneBit(int64(uint32(c)))) { 27232 break 27233 } 27234 b.resetWithControl(BlockARM64TBZ, x) 27235 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c))))) 27236 return true 27237 } 27238 // match: (ZW (MOVDconst [c]) yes no) 27239 // cond: int32(c) == 0 27240 // result: (First yes no) 27241 for b.Controls[0].Op == OpARM64MOVDconst { 27242 v_0 := b.Controls[0] 27243 c := auxIntToInt64(v_0.AuxInt) 27244 if !(int32(c) == 0) { 27245 break 27246 } 27247 b.Reset(BlockFirst) 27248 return true 27249 } 27250 // match: (ZW (MOVDconst [c]) yes no) 27251 // cond: int32(c) != 0 27252 // result: (First no yes) 27253 for b.Controls[0].Op == OpARM64MOVDconst { 27254 v_0 := b.Controls[0] 27255 c := auxIntToInt64(v_0.AuxInt) 27256 if !(int32(c) != 0) { 27257 break 27258 } 27259 b.Reset(BlockFirst) 27260 
b.swapSuccessors() 27261 return true 27262 } 27263 } 27264 return false 27265 }