github.com/riscv/riscv-go@v0.0.0-20200123204226-124ebd6fcc8e/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // autogenerated from gen/ARM64.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValueARM64(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpARM64ADD: 12 return rewriteValueARM64_OpARM64ADD(v, config) 13 case OpARM64ADDconst: 14 return rewriteValueARM64_OpARM64ADDconst(v, config) 15 case OpARM64ADDshiftLL: 16 return rewriteValueARM64_OpARM64ADDshiftLL(v, config) 17 case OpARM64ADDshiftRA: 18 return rewriteValueARM64_OpARM64ADDshiftRA(v, config) 19 case OpARM64ADDshiftRL: 20 return rewriteValueARM64_OpARM64ADDshiftRL(v, config) 21 case OpARM64AND: 22 return rewriteValueARM64_OpARM64AND(v, config) 23 case OpARM64ANDconst: 24 return rewriteValueARM64_OpARM64ANDconst(v, config) 25 case OpARM64ANDshiftLL: 26 return rewriteValueARM64_OpARM64ANDshiftLL(v, config) 27 case OpARM64ANDshiftRA: 28 return rewriteValueARM64_OpARM64ANDshiftRA(v, config) 29 case OpARM64ANDshiftRL: 30 return rewriteValueARM64_OpARM64ANDshiftRL(v, config) 31 case OpARM64BIC: 32 return rewriteValueARM64_OpARM64BIC(v, config) 33 case OpARM64BICconst: 34 return rewriteValueARM64_OpARM64BICconst(v, config) 35 case OpARM64BICshiftLL: 36 return rewriteValueARM64_OpARM64BICshiftLL(v, config) 37 case OpARM64BICshiftRA: 38 return rewriteValueARM64_OpARM64BICshiftRA(v, config) 39 case OpARM64BICshiftRL: 40 return rewriteValueARM64_OpARM64BICshiftRL(v, config) 41 case OpARM64CMP: 42 return rewriteValueARM64_OpARM64CMP(v, config) 43 case OpARM64CMPW: 44 return rewriteValueARM64_OpARM64CMPW(v, config) 45 case OpARM64CMPWconst: 46 return rewriteValueARM64_OpARM64CMPWconst(v, config) 47 case OpARM64CMPconst: 48 return rewriteValueARM64_OpARM64CMPconst(v, config) 49 case OpARM64CMPshiftLL: 50 return rewriteValueARM64_OpARM64CMPshiftLL(v, config) 51 case OpARM64CMPshiftRA: 52 return rewriteValueARM64_OpARM64CMPshiftRA(v, config) 53 case OpARM64CMPshiftRL: 54 return rewriteValueARM64_OpARM64CMPshiftRL(v, config) 55 case OpARM64CSELULT: 56 return rewriteValueARM64_OpARM64CSELULT(v, config) 57 case OpARM64CSELULT0: 58 return rewriteValueARM64_OpARM64CSELULT0(v, config) 59 case OpARM64DIV: 60 return rewriteValueARM64_OpARM64DIV(v, config) 61 case OpARM64DIVW: 62 return rewriteValueARM64_OpARM64DIVW(v, config) 63 case OpARM64Equal: 64 return rewriteValueARM64_OpARM64Equal(v, config) 65 case OpARM64FMOVDload: 66 return rewriteValueARM64_OpARM64FMOVDload(v, config) 67 case OpARM64FMOVDstore: 68 return rewriteValueARM64_OpARM64FMOVDstore(v, config) 69 case OpARM64FMOVSload: 70 return rewriteValueARM64_OpARM64FMOVSload(v, config) 71 case OpARM64FMOVSstore: 72 return rewriteValueARM64_OpARM64FMOVSstore(v, config) 73 case OpARM64GreaterEqual: 74 return rewriteValueARM64_OpARM64GreaterEqual(v, config) 75 case OpARM64GreaterEqualU: 76 return rewriteValueARM64_OpARM64GreaterEqualU(v, config) 77 case OpARM64GreaterThan: 78 return rewriteValueARM64_OpARM64GreaterThan(v, config) 79 case OpARM64GreaterThanU: 80 return rewriteValueARM64_OpARM64GreaterThanU(v, config) 81 case OpARM64LessEqual: 82 return rewriteValueARM64_OpARM64LessEqual(v, config) 83 case OpARM64LessEqualU: 84 return rewriteValueARM64_OpARM64LessEqualU(v, config) 85 case OpARM64LessThan: 86 return rewriteValueARM64_OpARM64LessThan(v, config) 87 case OpARM64LessThanU: 88 return rewriteValueARM64_OpARM64LessThanU(v, config) 89 case OpARM64MOD: 90 return rewriteValueARM64_OpARM64MOD(v, config) 91 case OpARM64MODW: 92 return rewriteValueARM64_OpARM64MODW(v, config) 93 case OpARM64MOVBUload: 94 return rewriteValueARM64_OpARM64MOVBUload(v, config) 95 case OpARM64MOVBUreg: 96 return rewriteValueARM64_OpARM64MOVBUreg(v, config) 97 case OpARM64MOVBload: 98 return rewriteValueARM64_OpARM64MOVBload(v, config) 99 case OpARM64MOVBreg: 100 return rewriteValueARM64_OpARM64MOVBreg(v, config) 101 case OpARM64MOVBstore: 102 return rewriteValueARM64_OpARM64MOVBstore(v, config) 103 case OpARM64MOVBstorezero: 104 return rewriteValueARM64_OpARM64MOVBstorezero(v, config) 105 case OpARM64MOVDload: 106 return rewriteValueARM64_OpARM64MOVDload(v, config) 107 case OpARM64MOVDreg: 108 return rewriteValueARM64_OpARM64MOVDreg(v, config) 109 case OpARM64MOVDstore: 110 return rewriteValueARM64_OpARM64MOVDstore(v, config) 111 case OpARM64MOVDstorezero: 112 return rewriteValueARM64_OpARM64MOVDstorezero(v, config) 113 case OpARM64MOVHUload: 114 return rewriteValueARM64_OpARM64MOVHUload(v, config) 115 case OpARM64MOVHUreg: 116 return rewriteValueARM64_OpARM64MOVHUreg(v, config) 117 case OpARM64MOVHload: 118 return rewriteValueARM64_OpARM64MOVHload(v, config) 119 case OpARM64MOVHreg: 120 return rewriteValueARM64_OpARM64MOVHreg(v, config) 121 case OpARM64MOVHstore: 122 return rewriteValueARM64_OpARM64MOVHstore(v, config) 123 case OpARM64MOVHstorezero: 124 return rewriteValueARM64_OpARM64MOVHstorezero(v, config) 125 case OpARM64MOVWUload: 126 return rewriteValueARM64_OpARM64MOVWUload(v, config) 127 case OpARM64MOVWUreg: 128 return rewriteValueARM64_OpARM64MOVWUreg(v, config) 129 case OpARM64MOVWload: 130 return rewriteValueARM64_OpARM64MOVWload(v, config) 131 case OpARM64MOVWreg: 132 return rewriteValueARM64_OpARM64MOVWreg(v, config) 133 case OpARM64MOVWstore: 134 return rewriteValueARM64_OpARM64MOVWstore(v, config) 135 case OpARM64MOVWstorezero: 136 return rewriteValueARM64_OpARM64MOVWstorezero(v, config) 137 case OpARM64MUL: 138 return rewriteValueARM64_OpARM64MUL(v, config) 139 case OpARM64MULW: 140 return rewriteValueARM64_OpARM64MULW(v, config) 141 case OpARM64MVN: 142 return rewriteValueARM64_OpARM64MVN(v, config) 143 case OpARM64NEG: 144 return rewriteValueARM64_OpARM64NEG(v, config) 145 case OpARM64NotEqual: 146 return rewriteValueARM64_OpARM64NotEqual(v, config) 147 case OpARM64OR: 148 return rewriteValueARM64_OpARM64OR(v, config) 149 case OpARM64ORconst: 150 return rewriteValueARM64_OpARM64ORconst(v, config) 151 case OpARM64ORshiftLL: 152 return rewriteValueARM64_OpARM64ORshiftLL(v, config) 153 case OpARM64ORshiftRA: 154 return rewriteValueARM64_OpARM64ORshiftRA(v, config) 155 case OpARM64ORshiftRL: 156 return rewriteValueARM64_OpARM64ORshiftRL(v, config) 157 case OpARM64SLL: 158 return rewriteValueARM64_OpARM64SLL(v, config) 159 case OpARM64SLLconst: 160 return rewriteValueARM64_OpARM64SLLconst(v, config) 161 case OpARM64SRA: 162 return rewriteValueARM64_OpARM64SRA(v, config) 163 case OpARM64SRAconst: 164 return rewriteValueARM64_OpARM64SRAconst(v, config) 165 case OpARM64SRL: 166 return rewriteValueARM64_OpARM64SRL(v, config) 167 case OpARM64SRLconst: 168 return rewriteValueARM64_OpARM64SRLconst(v, config) 169 case OpARM64SUB: 170 return rewriteValueARM64_OpARM64SUB(v, config) 171 case OpARM64SUBconst: 172 return rewriteValueARM64_OpARM64SUBconst(v, config) 173 case OpARM64SUBshiftLL: 174 return rewriteValueARM64_OpARM64SUBshiftLL(v, config) 175 case OpARM64SUBshiftRA: 176 return rewriteValueARM64_OpARM64SUBshiftRA(v, config) 177 case OpARM64SUBshiftRL: 178 return rewriteValueARM64_OpARM64SUBshiftRL(v, config) 179 case OpARM64UDIV: 180 return rewriteValueARM64_OpARM64UDIV(v, config) 181 case OpARM64UDIVW: 182 return rewriteValueARM64_OpARM64UDIVW(v, config) 183 case OpARM64UMOD: 184 return rewriteValueARM64_OpARM64UMOD(v, config) 185 case OpARM64UMODW: 186 return rewriteValueARM64_OpARM64UMODW(v, config) 187 case OpARM64XOR: 188 return rewriteValueARM64_OpARM64XOR(v, config) 189 case OpARM64XORconst: 190 return rewriteValueARM64_OpARM64XORconst(v, config) 191 case OpARM64XORshiftLL: 192 return rewriteValueARM64_OpARM64XORshiftLL(v, config) 193 case OpARM64XORshiftRA: 194 return rewriteValueARM64_OpARM64XORshiftRA(v, config) 195 case OpARM64XORshiftRL: 196 return rewriteValueARM64_OpARM64XORshiftRL(v, config) 197 case OpAdd16: 198 return rewriteValueARM64_OpAdd16(v, config) 199 case OpAdd32: 200 return rewriteValueARM64_OpAdd32(v, config) 201 case OpAdd32F: 202 return rewriteValueARM64_OpAdd32F(v, config) 203 case OpAdd64: 204 return rewriteValueARM64_OpAdd64(v, config) 205 case OpAdd64F: 206 return rewriteValueARM64_OpAdd64F(v, config) 207 case OpAdd8: 208 return rewriteValueARM64_OpAdd8(v, config) 209 case OpAddPtr: 210 return rewriteValueARM64_OpAddPtr(v, config) 211 case OpAddr: 212 return rewriteValueARM64_OpAddr(v, config) 213 case OpAnd16: 214 return rewriteValueARM64_OpAnd16(v, config) 215 case OpAnd32: 216 return rewriteValueARM64_OpAnd32(v, config) 217 case OpAnd64: 218 return rewriteValueARM64_OpAnd64(v, config) 219 case OpAnd8: 220 return rewriteValueARM64_OpAnd8(v, config) 221 case OpAndB: 222 return rewriteValueARM64_OpAndB(v, config) 223 case OpAtomicAdd32: 224 return rewriteValueARM64_OpAtomicAdd32(v, config) 225 case OpAtomicAdd64: 226 return rewriteValueARM64_OpAtomicAdd64(v, config) 227 case OpAtomicAnd8: 228 return rewriteValueARM64_OpAtomicAnd8(v, config) 229 case OpAtomicCompareAndSwap32: 230 return rewriteValueARM64_OpAtomicCompareAndSwap32(v, config) 231 case OpAtomicCompareAndSwap64: 232 return rewriteValueARM64_OpAtomicCompareAndSwap64(v, config) 233 case OpAtomicExchange32: 234 return rewriteValueARM64_OpAtomicExchange32(v, config) 235 case OpAtomicExchange64: 236 return rewriteValueARM64_OpAtomicExchange64(v, config) 237 case OpAtomicLoad32: 238 return rewriteValueARM64_OpAtomicLoad32(v, config) 239 case OpAtomicLoad64: 240 return rewriteValueARM64_OpAtomicLoad64(v, config) 241 case OpAtomicLoadPtr: 242 return rewriteValueARM64_OpAtomicLoadPtr(v, config) 243 case OpAtomicOr8: 244 return rewriteValueARM64_OpAtomicOr8(v, config) 245 case OpAtomicStore32: 246 return rewriteValueARM64_OpAtomicStore32(v, config) 247 case OpAtomicStore64: 248 return rewriteValueARM64_OpAtomicStore64(v, config) 249 case OpAtomicStorePtrNoWB: 250 return rewriteValueARM64_OpAtomicStorePtrNoWB(v, config) 251 case OpAvg64u: 252 return rewriteValueARM64_OpAvg64u(v, config) 253 case OpBswap32: 254 return rewriteValueARM64_OpBswap32(v, config) 255 case OpBswap64: 256 return rewriteValueARM64_OpBswap64(v, config) 257 case OpClosureCall: 258 return rewriteValueARM64_OpClosureCall(v, config) 259 case OpCom16: 260 return rewriteValueARM64_OpCom16(v, config) 261 case OpCom32: 262 return rewriteValueARM64_OpCom32(v, config) 263 case OpCom64: 264 return rewriteValueARM64_OpCom64(v, config) 265 case OpCom8: 266 return rewriteValueARM64_OpCom8(v, config) 267 case OpConst16: 268 return rewriteValueARM64_OpConst16(v, config) 269 case OpConst32: 270 return rewriteValueARM64_OpConst32(v, config) 271 case OpConst32F: 272 return rewriteValueARM64_OpConst32F(v, config) 273 case OpConst64: 274 return rewriteValueARM64_OpConst64(v, config) 275 case OpConst64F: 276 return rewriteValueARM64_OpConst64F(v, config) 277 case OpConst8: 278 return rewriteValueARM64_OpConst8(v, config) 279 case OpConstBool: 280 return rewriteValueARM64_OpConstBool(v, config) 281 case OpConstNil: 282 return rewriteValueARM64_OpConstNil(v, config) 283 case OpConvert: 284 return rewriteValueARM64_OpConvert(v, config) 285 case OpCtz32: 286 return rewriteValueARM64_OpCtz32(v, config) 287 case OpCtz64: 288 return rewriteValueARM64_OpCtz64(v, config) 289 case OpCvt32Fto32: 290 return rewriteValueARM64_OpCvt32Fto32(v, config) 291 case OpCvt32Fto32U: 292 return rewriteValueARM64_OpCvt32Fto32U(v, config) 293 case OpCvt32Fto64: 294 return rewriteValueARM64_OpCvt32Fto64(v, config) 295 case OpCvt32Fto64F: 296 return rewriteValueARM64_OpCvt32Fto64F(v, config) 297 case OpCvt32Fto64U: 298 return rewriteValueARM64_OpCvt32Fto64U(v, config) 299 case OpCvt32Uto32F: 300 return rewriteValueARM64_OpCvt32Uto32F(v, config) 301 case OpCvt32Uto64F: 302 return rewriteValueARM64_OpCvt32Uto64F(v, config) 303 case OpCvt32to32F: 304 return rewriteValueARM64_OpCvt32to32F(v, config) 305 case OpCvt32to64F: 306 return rewriteValueARM64_OpCvt32to64F(v, config) 307 case OpCvt64Fto32: 308 return rewriteValueARM64_OpCvt64Fto32(v, config) 309 case OpCvt64Fto32F: 310 return rewriteValueARM64_OpCvt64Fto32F(v, config) 311 case OpCvt64Fto32U: 312 return rewriteValueARM64_OpCvt64Fto32U(v, config) 313 case OpCvt64Fto64: 314 return rewriteValueARM64_OpCvt64Fto64(v, config) 315 case OpCvt64Fto64U: 316 return rewriteValueARM64_OpCvt64Fto64U(v, config) 317 case OpCvt64Uto32F: 318 return rewriteValueARM64_OpCvt64Uto32F(v, config) 319 case OpCvt64Uto64F: 320 return rewriteValueARM64_OpCvt64Uto64F(v, config) 321 case OpCvt64to32F: 322 return rewriteValueARM64_OpCvt64to32F(v, config) 323 case OpCvt64to64F: 324 return rewriteValueARM64_OpCvt64to64F(v, config) 325 case OpDeferCall: 326 return rewriteValueARM64_OpDeferCall(v, config) 327 case OpDiv16: 328 return rewriteValueARM64_OpDiv16(v, config) 329 case OpDiv16u: 330 return rewriteValueARM64_OpDiv16u(v, config) 331 case OpDiv32: 332 return rewriteValueARM64_OpDiv32(v, config) 333 case OpDiv32F: 334 return rewriteValueARM64_OpDiv32F(v, config) 335 case OpDiv32u: 336 return rewriteValueARM64_OpDiv32u(v, config) 337 case OpDiv64: 338 return rewriteValueARM64_OpDiv64(v, config) 339 case OpDiv64F: 340 return rewriteValueARM64_OpDiv64F(v, config) 341 case OpDiv64u: 342 return rewriteValueARM64_OpDiv64u(v, config) 343 case OpDiv8: 344 return rewriteValueARM64_OpDiv8(v, config) 345 case OpDiv8u: 346 return rewriteValueARM64_OpDiv8u(v, config) 347 case OpEq16: 348 return rewriteValueARM64_OpEq16(v, config) 349 case OpEq32: 350 return rewriteValueARM64_OpEq32(v, config) 351 case OpEq32F: 352 return rewriteValueARM64_OpEq32F(v, config) 353 case OpEq64: 354 return rewriteValueARM64_OpEq64(v, config) 355 case OpEq64F: 356 return rewriteValueARM64_OpEq64F(v, config) 357 case OpEq8: 358 return rewriteValueARM64_OpEq8(v, config) 359 case OpEqB: 360 return rewriteValueARM64_OpEqB(v, config) 361 case OpEqPtr: 362 return rewriteValueARM64_OpEqPtr(v, config) 363 case OpGeq16: 364 return rewriteValueARM64_OpGeq16(v, config) 365 case OpGeq16U: 366 return rewriteValueARM64_OpGeq16U(v, config) 367 case OpGeq32: 368 return rewriteValueARM64_OpGeq32(v, config) 369 case OpGeq32F: 370 return rewriteValueARM64_OpGeq32F(v, config) 371 case OpGeq32U: 372 return rewriteValueARM64_OpGeq32U(v, config) 373 case OpGeq64: 374 return rewriteValueARM64_OpGeq64(v, config) 375 case OpGeq64F: 376 return rewriteValueARM64_OpGeq64F(v, config) 377 case OpGeq64U: 378 return rewriteValueARM64_OpGeq64U(v, config) 379 case OpGeq8: 380 return rewriteValueARM64_OpGeq8(v, config) 381 case OpGeq8U: 382 return rewriteValueARM64_OpGeq8U(v, config) 383 case OpGetClosurePtr: 384 return rewriteValueARM64_OpGetClosurePtr(v, config) 385 case OpGoCall: 386 return rewriteValueARM64_OpGoCall(v, config) 387 case OpGreater16: 388 return rewriteValueARM64_OpGreater16(v, config) 389 case OpGreater16U: 390 return rewriteValueARM64_OpGreater16U(v, config) 391 case OpGreater32: 392 return rewriteValueARM64_OpGreater32(v, config) 393 case OpGreater32F: 394 return rewriteValueARM64_OpGreater32F(v, config) 395 case OpGreater32U: 396 return rewriteValueARM64_OpGreater32U(v, config) 397 case OpGreater64: 398 return rewriteValueARM64_OpGreater64(v, config) 399 case OpGreater64F: 400 return rewriteValueARM64_OpGreater64F(v, config) 401 case OpGreater64U: 402 return rewriteValueARM64_OpGreater64U(v, config) 403 case OpGreater8: 404 return rewriteValueARM64_OpGreater8(v, config) 405 case OpGreater8U: 406 return rewriteValueARM64_OpGreater8U(v, config) 407 case OpHmul16: 408 return rewriteValueARM64_OpHmul16(v, config) 409 case OpHmul16u: 410 return rewriteValueARM64_OpHmul16u(v, config) 411 case OpHmul32: 412 return rewriteValueARM64_OpHmul32(v, config) 413 case OpHmul32u: 414 return rewriteValueARM64_OpHmul32u(v, config) 415 case OpHmul64: 416 return rewriteValueARM64_OpHmul64(v, config) 417 case OpHmul64u: 418 return rewriteValueARM64_OpHmul64u(v, config) 419 case OpHmul8: 420 return rewriteValueARM64_OpHmul8(v, config) 421 case OpHmul8u: 422 return rewriteValueARM64_OpHmul8u(v, config) 423 case OpInterCall: 424 return rewriteValueARM64_OpInterCall(v, config) 425 case OpIsInBounds: 426 return rewriteValueARM64_OpIsInBounds(v, config) 427 case OpIsNonNil: 428 return rewriteValueARM64_OpIsNonNil(v, config) 429 case OpIsSliceInBounds: 430 return rewriteValueARM64_OpIsSliceInBounds(v, config) 431 case OpLeq16: 432 return rewriteValueARM64_OpLeq16(v, config) 433 case OpLeq16U: 434 return rewriteValueARM64_OpLeq16U(v, config) 435 case OpLeq32: 436 return rewriteValueARM64_OpLeq32(v, config) 437 case OpLeq32F: 438 return rewriteValueARM64_OpLeq32F(v, config) 439 case OpLeq32U: 440 return rewriteValueARM64_OpLeq32U(v, config) 441 case OpLeq64: 442 return rewriteValueARM64_OpLeq64(v, config) 443 case OpLeq64F: 444 return rewriteValueARM64_OpLeq64F(v, config) 445 case OpLeq64U: 446 return rewriteValueARM64_OpLeq64U(v, config) 447 case OpLeq8: 448 return rewriteValueARM64_OpLeq8(v, config) 449 case OpLeq8U: 450 return rewriteValueARM64_OpLeq8U(v, config) 451 case OpLess16: 452 return rewriteValueARM64_OpLess16(v, config) 453 case OpLess16U: 454 return rewriteValueARM64_OpLess16U(v, config) 455 case OpLess32: 456 return rewriteValueARM64_OpLess32(v, config) 457 case OpLess32F: 458 return rewriteValueARM64_OpLess32F(v, config) 459 case OpLess32U: 460 return rewriteValueARM64_OpLess32U(v, config) 461 case OpLess64: 462 return rewriteValueARM64_OpLess64(v, config) 463 case OpLess64F: 464 return rewriteValueARM64_OpLess64F(v, config) 465 case OpLess64U: 466 return rewriteValueARM64_OpLess64U(v, config) 467 case OpLess8: 468 return rewriteValueARM64_OpLess8(v, config) 469 case OpLess8U: 470 return rewriteValueARM64_OpLess8U(v, config) 471 case OpLoad: 472 return rewriteValueARM64_OpLoad(v, config) 473 case OpLsh16x16: 474 return rewriteValueARM64_OpLsh16x16(v, config) 475 case OpLsh16x32: 476 return rewriteValueARM64_OpLsh16x32(v, config) 477 case OpLsh16x64: 478 return rewriteValueARM64_OpLsh16x64(v, config) 479 case OpLsh16x8: 480 return rewriteValueARM64_OpLsh16x8(v, config) 481 case OpLsh32x16: 482 return rewriteValueARM64_OpLsh32x16(v, config) 483 case OpLsh32x32: 484 return rewriteValueARM64_OpLsh32x32(v, config) 485 case OpLsh32x64: 486 return rewriteValueARM64_OpLsh32x64(v, config) 487 case OpLsh32x8: 488 return rewriteValueARM64_OpLsh32x8(v, config) 489 case OpLsh64x16: 490 return rewriteValueARM64_OpLsh64x16(v, config) 491 case OpLsh64x32: 492 return rewriteValueARM64_OpLsh64x32(v, config) 493 case OpLsh64x64: 494 return rewriteValueARM64_OpLsh64x64(v, config) 495 case OpLsh64x8: 496 return rewriteValueARM64_OpLsh64x8(v, config) 497 case OpLsh8x16: 498 return rewriteValueARM64_OpLsh8x16(v, config) 499 case OpLsh8x32: 500 return rewriteValueARM64_OpLsh8x32(v, config) 501 case OpLsh8x64: 502 return rewriteValueARM64_OpLsh8x64(v, config) 503 case OpLsh8x8: 504 return rewriteValueARM64_OpLsh8x8(v, config) 505 case OpMod16: 506 return rewriteValueARM64_OpMod16(v, config) 507 case OpMod16u: 508 return rewriteValueARM64_OpMod16u(v, config) 509 case OpMod32: 510 return rewriteValueARM64_OpMod32(v, config) 511 case OpMod32u: 512 return rewriteValueARM64_OpMod32u(v, config) 513 case OpMod64: 514 return rewriteValueARM64_OpMod64(v, config) 515 case OpMod64u: 516 return rewriteValueARM64_OpMod64u(v, config) 517 case OpMod8: 518 return rewriteValueARM64_OpMod8(v, config) 519 case OpMod8u: 520 return rewriteValueARM64_OpMod8u(v, config) 521 case OpMove: 522 return rewriteValueARM64_OpMove(v, config) 523 case OpMul16: 524 return rewriteValueARM64_OpMul16(v, config) 525 case OpMul32: 526 return rewriteValueARM64_OpMul32(v, config) 527 case OpMul32F: 528 return rewriteValueARM64_OpMul32F(v, config) 529 case OpMul64: 530 return rewriteValueARM64_OpMul64(v, config) 531 case OpMul64F: 532 return rewriteValueARM64_OpMul64F(v, config) 533 case OpMul8: 534 return rewriteValueARM64_OpMul8(v, config) 535 case OpNeg16: 536 return rewriteValueARM64_OpNeg16(v, config) 537 case OpNeg32: 538 return rewriteValueARM64_OpNeg32(v, config) 539 case OpNeg32F: 540 return rewriteValueARM64_OpNeg32F(v, config) 541 case OpNeg64: 542 return rewriteValueARM64_OpNeg64(v, config) 543 case OpNeg64F: 544 return rewriteValueARM64_OpNeg64F(v, config) 545 case OpNeg8: 546 return rewriteValueARM64_OpNeg8(v, config) 547 case OpNeq16: 548 return rewriteValueARM64_OpNeq16(v, config) 549 case OpNeq32: 550 return rewriteValueARM64_OpNeq32(v, config) 551 case OpNeq32F: 552 return rewriteValueARM64_OpNeq32F(v, config) 553 case OpNeq64: 554 return rewriteValueARM64_OpNeq64(v, config) 555 case OpNeq64F: 556 return rewriteValueARM64_OpNeq64F(v, config) 557 case OpNeq8: 558 return rewriteValueARM64_OpNeq8(v, config) 559 case OpNeqB: 560 return rewriteValueARM64_OpNeqB(v, config) 561 case OpNeqPtr: 562 return rewriteValueARM64_OpNeqPtr(v, config) 563 case OpNilCheck: 564 return rewriteValueARM64_OpNilCheck(v, config) 565 case OpNot: 566 return rewriteValueARM64_OpNot(v, config) 567 case OpOffPtr: 568 return rewriteValueARM64_OpOffPtr(v, config) 569 case OpOr16: 570 return rewriteValueARM64_OpOr16(v, config) 571 case OpOr32: 572 return rewriteValueARM64_OpOr32(v, config) 573 case OpOr64: 574 return rewriteValueARM64_OpOr64(v, config) 575 case OpOr8: 576 return rewriteValueARM64_OpOr8(v, config) 577 case OpOrB: 578 return rewriteValueARM64_OpOrB(v, config) 579 case OpRsh16Ux16: 580 return rewriteValueARM64_OpRsh16Ux16(v, config) 581 case OpRsh16Ux32: 582 return rewriteValueARM64_OpRsh16Ux32(v, config) 583 case OpRsh16Ux64: 584 return rewriteValueARM64_OpRsh16Ux64(v, config) 585 case OpRsh16Ux8: 586 return rewriteValueARM64_OpRsh16Ux8(v, config) 587 case OpRsh16x16: 588 return rewriteValueARM64_OpRsh16x16(v, config) 589 case OpRsh16x32: 590 return rewriteValueARM64_OpRsh16x32(v, config) 591 case OpRsh16x64: 592 return rewriteValueARM64_OpRsh16x64(v, config) 593 case OpRsh16x8: 594 return rewriteValueARM64_OpRsh16x8(v, config) 595 case OpRsh32Ux16: 596 return rewriteValueARM64_OpRsh32Ux16(v, config) 597 case OpRsh32Ux32: 598 return rewriteValueARM64_OpRsh32Ux32(v, config) 599 case OpRsh32Ux64: 600 return rewriteValueARM64_OpRsh32Ux64(v, config) 601 case OpRsh32Ux8: 602 return rewriteValueARM64_OpRsh32Ux8(v, config) 603 case OpRsh32x16: 604 return rewriteValueARM64_OpRsh32x16(v, config) 605 case OpRsh32x32: 606 return rewriteValueARM64_OpRsh32x32(v, config) 607 case OpRsh32x64: 608 return rewriteValueARM64_OpRsh32x64(v, config) 609 case OpRsh32x8: 610 return rewriteValueARM64_OpRsh32x8(v, config) 611 case OpRsh64Ux16: 612 return rewriteValueARM64_OpRsh64Ux16(v, config) 613 case OpRsh64Ux32: 614 return rewriteValueARM64_OpRsh64Ux32(v, config) 615 case OpRsh64Ux64: 616 return rewriteValueARM64_OpRsh64Ux64(v, config) 617 case OpRsh64Ux8: 618 return rewriteValueARM64_OpRsh64Ux8(v, config) 619 case OpRsh64x16: 620 return rewriteValueARM64_OpRsh64x16(v, config) 621 case OpRsh64x32: 622 return rewriteValueARM64_OpRsh64x32(v, config) 623 case OpRsh64x64: 624 return rewriteValueARM64_OpRsh64x64(v, config) 625 case OpRsh64x8: 626 return rewriteValueARM64_OpRsh64x8(v, config) 627 case OpRsh8Ux16: 628 return rewriteValueARM64_OpRsh8Ux16(v, config) 629 case OpRsh8Ux32: 630 return rewriteValueARM64_OpRsh8Ux32(v, config) 631 case OpRsh8Ux64: 632 return rewriteValueARM64_OpRsh8Ux64(v, config) 633 case OpRsh8Ux8: 634 return rewriteValueARM64_OpRsh8Ux8(v, config) 635 case OpRsh8x16: 636 return rewriteValueARM64_OpRsh8x16(v, config) 637 case OpRsh8x32: 638 return rewriteValueARM64_OpRsh8x32(v, config) 639 case OpRsh8x64: 640 return rewriteValueARM64_OpRsh8x64(v, config) 641 case OpRsh8x8: 642 return rewriteValueARM64_OpRsh8x8(v, config) 643 case OpSignExt16to32: 644 return rewriteValueARM64_OpSignExt16to32(v, config) 645 case OpSignExt16to64: 646 return rewriteValueARM64_OpSignExt16to64(v, config) 647 case OpSignExt32to64: 648 return rewriteValueARM64_OpSignExt32to64(v, config) 649 case OpSignExt8to16: 650 return rewriteValueARM64_OpSignExt8to16(v, config) 651 case OpSignExt8to32: 652 return rewriteValueARM64_OpSignExt8to32(v, config) 653 case OpSignExt8to64: 654 return rewriteValueARM64_OpSignExt8to64(v, config) 655 case OpSlicemask: 656 return rewriteValueARM64_OpSlicemask(v, config) 657 case OpSqrt: 658 return rewriteValueARM64_OpSqrt(v, config) 659 case OpStaticCall: 660 return rewriteValueARM64_OpStaticCall(v, config) 661 case OpStore: 662 return rewriteValueARM64_OpStore(v, config) 663 case OpSub16: 664 return rewriteValueARM64_OpSub16(v, config) 665 case OpSub32: 666 return rewriteValueARM64_OpSub32(v, config) 667 case OpSub32F: 668 return rewriteValueARM64_OpSub32F(v, config) 669 case OpSub64: 670 return rewriteValueARM64_OpSub64(v, config) 671 case OpSub64F: 672 return rewriteValueARM64_OpSub64F(v, config) 673 case OpSub8: 674 return rewriteValueARM64_OpSub8(v, config) 675 case OpSubPtr: 676 return rewriteValueARM64_OpSubPtr(v, config) 677 case OpTrunc16to8: 678 return rewriteValueARM64_OpTrunc16to8(v, config) 679 case OpTrunc32to16: 680 return rewriteValueARM64_OpTrunc32to16(v, config) 681 case OpTrunc32to8: 682 return rewriteValueARM64_OpTrunc32to8(v, config) 683 case OpTrunc64to16: 684 return rewriteValueARM64_OpTrunc64to16(v, config) 685 case OpTrunc64to32: 686 return rewriteValueARM64_OpTrunc64to32(v, config) 687 case OpTrunc64to8: 688 return rewriteValueARM64_OpTrunc64to8(v, config) 689 case OpXor16: 690 return rewriteValueARM64_OpXor16(v, config) 691 case OpXor32: 692 return rewriteValueARM64_OpXor32(v, config) 693 case OpXor64: 694 return rewriteValueARM64_OpXor64(v, config) 695 case OpXor8: 696 return rewriteValueARM64_OpXor8(v, config) 697 case OpZero: 698 return rewriteValueARM64_OpZero(v, config) 699 case OpZeroExt16to32: 700 return rewriteValueARM64_OpZeroExt16to32(v, config) 701 case OpZeroExt16to64: 702 return rewriteValueARM64_OpZeroExt16to64(v, config) 703 case OpZeroExt32to64: 704 return rewriteValueARM64_OpZeroExt32to64(v, config) 705 case OpZeroExt8to16: 706 return rewriteValueARM64_OpZeroExt8to16(v, config) 707 case OpZeroExt8to32: 708 return rewriteValueARM64_OpZeroExt8to32(v, config) 709 case OpZeroExt8to64: 710 return rewriteValueARM64_OpZeroExt8to64(v, config) 711 } 712 return false 713 } 714 func rewriteValueARM64_OpARM64ADD(v *Value, config *Config) bool { 715 b := v.Block 716 _ = b 717 // match: (ADD (MOVDconst [c]) x) 718 // cond: 719 // result: (ADDconst [c] x) 720 for { 721 v_0 := v.Args[0] 722 if v_0.Op != OpARM64MOVDconst { 723 break 724 } 725 c := v_0.AuxInt 726 x := v.Args[1] 727 v.reset(OpARM64ADDconst) 728 v.AuxInt = c 729 v.AddArg(x) 730 return true 731 } 732 // match: (ADD x (MOVDconst [c])) 733 // cond: 734 // result: (ADDconst [c] x) 735 for { 736 x := v.Args[0] 737 v_1 := v.Args[1] 738 if v_1.Op != OpARM64MOVDconst { 739 break 740 } 741 c := v_1.AuxInt 742 v.reset(OpARM64ADDconst) 743 v.AuxInt = c 744 v.AddArg(x) 745 return true 746 } 747 // match: (ADD x (NEG y)) 748 // cond: 749 // result: (SUB x y) 750 for { 751 x := v.Args[0] 752 v_1 := v.Args[1] 753 if v_1.Op != OpARM64NEG { 754 break 755 } 756 y := v_1.Args[0] 757 v.reset(OpARM64SUB) 758 v.AddArg(x) 759 v.AddArg(y) 760 return true 761 } 762 // match: (ADD (NEG y) x) 763 // cond: 764 // result: (SUB x y) 765 for { 766 v_0 := v.Args[0] 767 if v_0.Op != OpARM64NEG { 768 break 769 } 770 y := v_0.Args[0] 771 x := v.Args[1] 772 v.reset(OpARM64SUB) 773 v.AddArg(x) 774 v.AddArg(y) 775 return true 776 } 777 // match: (ADD x (SLLconst [c] y)) 778 // cond: 779 // result: (ADDshiftLL x y [c]) 780 for { 781 x := v.Args[0] 782 v_1 := v.Args[1] 783 if v_1.Op != OpARM64SLLconst { 784 break 785 } 786 c := v_1.AuxInt 787 y := v_1.Args[0] 788 v.reset(OpARM64ADDshiftLL) 789 v.AuxInt = c 790 v.AddArg(x) 791 v.AddArg(y) 792 return true 793 } 794 // match: (ADD (SLLconst [c] y) x) 795 // cond: 796 // result: (ADDshiftLL x y [c]) 797 for { 798 v_0 := v.Args[0] 799 if v_0.Op != OpARM64SLLconst { 800 break 801 } 802 c := v_0.AuxInt 803 y := v_0.Args[0] 804 x := v.Args[1] 805 v.reset(OpARM64ADDshiftLL) 806 v.AuxInt = c 807 v.AddArg(x) 808 v.AddArg(y) 809 return true 810 } 811 // match: (ADD x (SRLconst [c] y)) 812 // cond: 813 // result: (ADDshiftRL x y [c]) 814 for { 815 x := v.Args[0] 816 v_1 := v.Args[1] 817 if v_1.Op != OpARM64SRLconst { 818 break 819 } 820 c := v_1.AuxInt 821 y := v_1.Args[0] 822 v.reset(OpARM64ADDshiftRL) 823 v.AuxInt = c 824 v.AddArg(x) 825 v.AddArg(y) 826 return true 827 } 828 // match: (ADD (SRLconst [c] y) x) 829 // cond: 830 // result: (ADDshiftRL x y [c]) 831 for { 832 v_0 := v.Args[0] 833 if v_0.Op != OpARM64SRLconst { 834 break 835 } 836 c := v_0.AuxInt 837 y := v_0.Args[0] 838 x := v.Args[1] 839 v.reset(OpARM64ADDshiftRL) 840 v.AuxInt = c 841 v.AddArg(x) 842 v.AddArg(y) 843 return true 844 } 845 // match: (ADD x (SRAconst [c] y)) 846 // cond: 847 // result: (ADDshiftRA x y [c]) 848 for { 849 x := v.Args[0] 850 v_1 := v.Args[1] 851 if v_1.Op != OpARM64SRAconst { 852 break 853 } 854 c := v_1.AuxInt 855 y := v_1.Args[0] 856 v.reset(OpARM64ADDshiftRA) 857 v.AuxInt = c 858 v.AddArg(x) 859 v.AddArg(y) 860 return true 861 } 862 // match: (ADD (SRAconst [c] y) x) 863 // cond: 864 // result: (ADDshiftRA x y [c]) 865 for { 866 v_0 := v.Args[0] 867 if v_0.Op != OpARM64SRAconst { 868 break 869 } 870 c := v_0.AuxInt 871 y := v_0.Args[0] 872 x := v.Args[1] 873 v.reset(OpARM64ADDshiftRA) 874 v.AuxInt = c 875 v.AddArg(x) 876 v.AddArg(y) 877 return true 878 } 879 return false 880 } 881 func rewriteValueARM64_OpARM64ADDconst(v *Value, config *Config) bool { 882 b := v.Block 883 _ = b 884 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 885 // cond: 886 // result: (MOVDaddr [off1+off2] {sym} ptr) 887 for { 888 off1 := v.AuxInt 889 v_0 := v.Args[0] 890 if v_0.Op != OpARM64MOVDaddr { 891 break 892 } 893 off2 := v_0.AuxInt 894 sym := v_0.Aux 895 ptr := v_0.Args[0] 896 v.reset(OpARM64MOVDaddr) 897 v.AuxInt = off1 + off2 898 v.Aux = sym 899 v.AddArg(ptr) 900 return true 901 } 902 // match: (ADDconst [0] x) 903 // cond: 904 // result: x 905 for { 906 if v.AuxInt != 0 { 907 break 908 } 909 x := v.Args[0] 910 v.reset(OpCopy) 911 v.Type = x.Type 912 v.AddArg(x) 913 return true 914 } 915 // match: (ADDconst [c] (MOVDconst [d])) 916 // cond: 917 // result: (MOVDconst [c+d]) 918 for { 919 c := v.AuxInt 920 v_0 := v.Args[0] 921 if v_0.Op != OpARM64MOVDconst { 922 break 923 } 924 d := v_0.AuxInt 925 v.reset(OpARM64MOVDconst) 926 v.AuxInt = c + d 927 return true 928 } 929 // match: (ADDconst [c] (ADDconst [d] x)) 930 // cond: 931 // result: (ADDconst [c+d] x) 932 for { 933 c := v.AuxInt 934 v_0 := v.Args[0] 935 if v_0.Op != OpARM64ADDconst { 936 break 937 } 938 d := v_0.AuxInt 939 x := v_0.Args[0] 940 v.reset(OpARM64ADDconst) 941 v.AuxInt = c + d 942 v.AddArg(x) 943 return true 944 } 945 // match: (ADDconst [c] (SUBconst [d] x)) 946 // cond: 947 // result: (ADDconst [c-d] x) 948 for { 949 c := v.AuxInt 950 v_0 := v.Args[0] 951 if v_0.Op != OpARM64SUBconst { 952 break 953 } 954 d := v_0.AuxInt 955 x := v_0.Args[0] 956 v.reset(OpARM64ADDconst) 957 v.AuxInt = c - d 958 v.AddArg(x) 959 return true 960 } 961 return false 962 } 963 func rewriteValueARM64_OpARM64ADDshiftLL(v *Value, config *Config) bool { 964 b := v.Block 965 _ = b 966 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 967 // cond: 968 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 969 for { 970 d := v.AuxInt 971 v_0 := v.Args[0] 972 if v_0.Op != OpARM64MOVDconst { 973 break 974 } 975 c := v_0.AuxInt 976 x := v.Args[1] 977 v.reset(OpARM64ADDconst) 978 v.AuxInt = c 979 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 980 v0.AuxInt = d 981 v0.AddArg(x) 982 v.AddArg(v0) 983 return true 984 } 985 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 986 // cond: 987 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 988 for { 989 d := v.AuxInt 990 x := v.Args[0] 991 v_1 := v.Args[1] 992 if v_1.Op != OpARM64MOVDconst { 993 break 994 } 995 c := v_1.AuxInt 996 v.reset(OpARM64ADDconst) 997 v.AuxInt = int64(uint64(c) << uint64(d)) 998 v.AddArg(x) 999 return true 1000 } 1001 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 1002 // cond: 1003 // result: (RORconst [64-c] x) 1004 for { 1005 c := v.AuxInt 1006 v_0 := v.Args[0] 1007 if v_0.Op != OpARM64SRLconst { 1008 break 1009 } 1010 if v_0.AuxInt != 64-c { 1011 break 1012 } 1013 x := v_0.Args[0] 1014 if x != v.Args[1] { 1015 break 1016 } 1017 v.reset(OpARM64RORconst) 1018 v.AuxInt = 64 - c 1019 v.AddArg(x) 1020 return true 1021 } 1022 // match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 1023 // cond: c < 32 && t.Size() == 4 1024 // result: (RORWconst [32-c] x) 1025 for { 1026 t := v.Type 1027 c := v.AuxInt 1028 v_0 := v.Args[0] 1029 if v_0.Op != OpARM64SRLconst { 1030 break 1031 } 1032 if v_0.AuxInt != 32-c { 1033 break 1034 } 1035 v_0_0 := v_0.Args[0] 1036 if v_0_0.Op != OpARM64MOVWUreg { 1037 break 1038 } 1039 x := v_0_0.Args[0] 1040 if x != v.Args[1] { 1041 break 1042 } 1043 if !(c < 32 && t.Size() == 4) { 1044 break 1045 } 1046 v.reset(OpARM64RORWconst) 1047 v.AuxInt = 32 - c 1048 v.AddArg(x) 1049 return true 1050 } 1051 return false 1052 } 1053 func rewriteValueARM64_OpARM64ADDshiftRA(v *Value, config *Config) bool { 1054 b := v.Block 1055 _ = b 1056 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1057 // cond: 1058 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1059 for { 1060 d := v.AuxInt 1061 v_0 := v.Args[0] 1062 if v_0.Op != OpARM64MOVDconst { 1063 break 1064 } 1065 c := v_0.AuxInt 1066 x := v.Args[1] 1067 v.reset(OpARM64ADDconst) 1068 v.AuxInt = c 1069 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1070 v0.AuxInt = d 1071 v0.AddArg(x) 1072 v.AddArg(v0) 1073 return true 1074 } 1075 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1076 // cond: 1077 // result: (ADDconst x [int64(int64(c)>>uint64(d))]) 1078 for { 1079 d := v.AuxInt 1080 x := v.Args[0] 1081 v_1 := v.Args[1] 1082 if v_1.Op != OpARM64MOVDconst { 1083 break 1084 } 1085 c := v_1.AuxInt 1086 v.reset(OpARM64ADDconst) 1087 v.AuxInt = int64(int64(c) >> uint64(d)) 1088 v.AddArg(x) 1089 return true 1090 } 1091 return false 1092 } 1093 func rewriteValueARM64_OpARM64ADDshiftRL(v *Value, config *Config) bool { 1094 b := v.Block 1095 _ = b 1096 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1097 // cond: 1098 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1099 for { 1100 d := v.AuxInt 1101 v_0 := v.Args[0] 1102 if v_0.Op != OpARM64MOVDconst { 1103 break 1104 } 1105 c := v_0.AuxInt 1106 x := v.Args[1] 1107 v.reset(OpARM64ADDconst) 1108 v.AuxInt = c 1109 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1110 v0.AuxInt = d 1111 v0.AddArg(x) 1112 v.AddArg(v0) 1113 return true 1114 } 1115 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1116 // cond: 1117 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1118 for { 1119 d := v.AuxInt 1120 x := v.Args[0] 1121 v_1 := v.Args[1] 1122 if v_1.Op != OpARM64MOVDconst { 1123 break 1124 } 1125 c := v_1.AuxInt 1126 v.reset(OpARM64ADDconst) 1127 v.AuxInt = int64(uint64(c) >> uint64(d)) 1128 v.AddArg(x) 1129 return true 1130 } 1131 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 1132 // cond: 1133 // result: (RORconst [ c] x) 1134 for { 1135 c := v.AuxInt 1136 v_0 := v.Args[0] 1137 if v_0.Op != OpARM64SLLconst { 1138 break 1139 } 1140 if v_0.AuxInt != 64-c { 1141 break 1142 } 1143 x := v_0.Args[0] 1144 if x != v.Args[1] { 1145 break 1146 } 1147 v.reset(OpARM64RORconst) 1148 v.AuxInt = c 1149 v.AddArg(x) 1150 return true 1151 } 1152 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 1153 // cond: c < 32 && t.Size() == 4 1154 // result: (RORWconst [ c] x) 1155 for { 1156 t := v.Type 1157 c := v.AuxInt 1158 v_0 := v.Args[0] 1159 if v_0.Op != OpARM64SLLconst { 1160 break 1161 } 1162 if v_0.AuxInt != 32-c { 1163 break 1164 } 1165 x := v_0.Args[0] 1166 v_1 := v.Args[1] 1167 if v_1.Op != OpARM64MOVWUreg { 1168 break 1169 } 1170 if x != v_1.Args[0] { 1171 break 1172 } 1173 if !(c < 32 && t.Size() == 4) { 1174 break 1175 } 1176 v.reset(OpARM64RORWconst) 1177 v.AuxInt = c 1178 v.AddArg(x) 1179 return true 1180 } 1181 return false 1182 } 1183 func rewriteValueARM64_OpARM64AND(v *Value, config *Config) bool { 1184 b := v.Block 1185 _ = b 1186 // match: (AND (MOVDconst [c]) x) 1187 // cond: 1188 // result: (ANDconst [c] x) 1189 for { 1190 v_0 := v.Args[0] 1191 if v_0.Op != OpARM64MOVDconst { 1192 break 1193 } 1194 c := v_0.AuxInt 1195 x := v.Args[1] 1196 v.reset(OpARM64ANDconst) 1197 v.AuxInt = c 1198 v.AddArg(x) 1199 return true 1200 } 1201 // match: (AND x (MOVDconst [c])) 1202 // cond: 1203 // result: (ANDconst [c] x) 1204 for { 1205 x := v.Args[0] 1206 v_1 := v.Args[1] 1207 if v_1.Op != OpARM64MOVDconst { 1208 break 1209 } 1210 c := v_1.AuxInt 1211 v.reset(OpARM64ANDconst) 1212 v.AuxInt = c 1213 v.AddArg(x) 1214 return true 1215 } 1216 // match: (AND x x) 1217 // cond: 1218 // result: x 1219 for { 1220 x := v.Args[0] 1221 if x != v.Args[1] { 1222 break 1223 } 1224 v.reset(OpCopy) 1225 v.Type = x.Type 1226 v.AddArg(x) 1227 return true 1228 } 1229 // match: (AND x (MVN y)) 1230 // cond: 1231 // result: (BIC x y) 1232 for { 1233 x := v.Args[0] 1234 v_1 := v.Args[1] 1235 if v_1.Op != OpARM64MVN { 1236 break 1237 } 1238 y := v_1.Args[0] 1239 v.reset(OpARM64BIC) 1240 v.AddArg(x) 1241 v.AddArg(y) 1242 return true 1243 } 1244 // match: (AND x (SLLconst [c] y)) 1245 // cond: 1246 // result: (ANDshiftLL x y [c]) 1247 for { 1248 x := v.Args[0] 1249 v_1 := v.Args[1] 1250 if v_1.Op != OpARM64SLLconst { 1251 break 1252 } 1253 c := v_1.AuxInt 1254 y := v_1.Args[0] 1255 v.reset(OpARM64ANDshiftLL) 1256 v.AuxInt = c 1257 v.AddArg(x) 1258 v.AddArg(y) 1259 return true 1260 } 1261 // match: (AND (SLLconst [c] y) x) 1262 // cond: 1263 // result: (ANDshiftLL x y [c]) 1264 for { 1265 v_0 := v.Args[0] 1266 if v_0.Op != OpARM64SLLconst { 1267 break 1268 } 1269 c := v_0.AuxInt 1270 y := v_0.Args[0] 1271 x := v.Args[1] 1272 v.reset(OpARM64ANDshiftLL) 1273 v.AuxInt = c 1274 v.AddArg(x) 1275 v.AddArg(y) 1276 return true 1277 } 1278 // match: (AND x (SRLconst [c] y)) 1279 // cond: 1280 // result: (ANDshiftRL x y [c]) 1281 for { 1282 x := v.Args[0] 1283 v_1 := v.Args[1] 1284 if v_1.Op != OpARM64SRLconst { 1285 break 1286 } 1287 c := v_1.AuxInt 1288 y := v_1.Args[0] 1289 v.reset(OpARM64ANDshiftRL) 1290 v.AuxInt = c 1291 v.AddArg(x) 1292 v.AddArg(y) 1293 return true 1294 } 1295 // match: (AND (SRLconst [c] y) x) 1296 // cond: 1297 // result: (ANDshiftRL x y [c]) 1298 for { 1299 v_0 := v.Args[0] 1300 if v_0.Op != OpARM64SRLconst { 1301 break 1302 } 1303 c := v_0.AuxInt 1304 y := v_0.Args[0] 1305 x := v.Args[1] 1306 v.reset(OpARM64ANDshiftRL) 1307 v.AuxInt = c 1308 v.AddArg(x) 1309 v.AddArg(y) 1310 return true 1311 } 1312 // match: (AND x (SRAconst [c] y)) 1313 // cond: 1314 // result: (ANDshiftRA x y [c]) 1315 for { 1316 x := v.Args[0] 1317 v_1 := v.Args[1] 1318 if v_1.Op != OpARM64SRAconst { 1319 break 1320 } 1321 c := v_1.AuxInt 1322 y := v_1.Args[0] 1323 v.reset(OpARM64ANDshiftRA) 1324 v.AuxInt = c 1325 v.AddArg(x) 1326 v.AddArg(y) 1327 return true 1328 } 1329 // match: (AND (SRAconst [c] y) x) 1330 // cond: 1331 // result: (ANDshiftRA x y [c]) 1332 for { 1333 v_0 := v.Args[0] 1334 if v_0.Op != OpARM64SRAconst { 1335 break 1336 } 1337 c := v_0.AuxInt 1338 y := v_0.Args[0] 1339 x := v.Args[1] 1340 v.reset(OpARM64ANDshiftRA) 1341 v.AuxInt = c 1342 v.AddArg(x) 1343 v.AddArg(y) 1344 return true 1345 } 1346 return false 1347 } 1348 func rewriteValueARM64_OpARM64ANDconst(v *Value, config *Config) bool { 1349 b := v.Block 1350 _ = b 1351 // match: (ANDconst [0] _) 1352 // cond: 1353 // result: (MOVDconst [0]) 1354 for { 1355 if v.AuxInt != 0 { 1356 break 1357 } 1358 v.reset(OpARM64MOVDconst) 1359 v.AuxInt = 0 1360 return true 1361 } 1362 // match: (ANDconst [-1] x) 1363 // cond: 1364 // result: x 1365 for { 1366 if v.AuxInt != -1 { 1367 break 1368 } 1369 x := v.Args[0] 1370 v.reset(OpCopy) 1371 v.Type = x.Type 1372 v.AddArg(x) 1373 return true 1374 } 1375 // match: (ANDconst [c] (MOVDconst [d])) 1376 // cond: 1377 // result: (MOVDconst [c&d]) 1378 for { 1379 c := v.AuxInt 1380 v_0 := v.Args[0] 1381 if v_0.Op != OpARM64MOVDconst { 1382 break 1383 } 1384 d := v_0.AuxInt 1385 v.reset(OpARM64MOVDconst) 1386 v.AuxInt = c & d 1387 return true 1388 } 1389 // match: (ANDconst [c] (ANDconst [d] x)) 1390 // cond: 1391 // result: (ANDconst [c&d] x) 1392 for { 1393 c := v.AuxInt 1394 v_0 := v.Args[0] 1395 if v_0.Op != OpARM64ANDconst { 1396 break 1397 } 1398 d := v_0.AuxInt 1399 x := v_0.Args[0] 1400 v.reset(OpARM64ANDconst) 1401 v.AuxInt = c & d 1402 v.AddArg(x) 1403 return true 1404 } 1405 return false 1406 } 1407 func rewriteValueARM64_OpARM64ANDshiftLL(v *Value, config *Config) bool { 1408 b := v.Block 1409 _ = b 1410 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 1411 // cond: 1412 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 1413 for { 1414 d := v.AuxInt 1415 v_0 := v.Args[0] 1416 if v_0.Op != OpARM64MOVDconst { 1417 break 1418 } 1419 c := v_0.AuxInt 1420 x := v.Args[1] 1421 v.reset(OpARM64ANDconst) 1422 v.AuxInt = c 1423 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1424 v0.AuxInt = d 1425 v0.AddArg(x) 1426 v.AddArg(v0) 1427 return true 1428 } 1429 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 1430 // cond: 1431 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 1432 for { 1433 d := v.AuxInt 1434 x := v.Args[0] 1435 v_1 := v.Args[1] 1436 if v_1.Op != OpARM64MOVDconst { 1437 break 1438 } 1439 c := v_1.AuxInt 1440 v.reset(OpARM64ANDconst) 1441 v.AuxInt = int64(uint64(c) << uint64(d)) 1442 v.AddArg(x) 1443 return true 1444 } 1445 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 1446 // cond: c==d 1447 // result: y 1448 for { 1449 d := v.AuxInt 1450 x := v.Args[0] 1451 y := v.Args[1] 1452 if y.Op != OpARM64SLLconst { 1453 break 1454 } 1455 c := y.AuxInt 1456 if x != y.Args[0] { 1457 break 1458 } 1459 if !(c == d) { 1460 break 1461 } 1462 v.reset(OpCopy) 1463 v.Type = y.Type 1464 v.AddArg(y) 1465 return true 1466 } 1467 return false 1468 } 1469 func rewriteValueARM64_OpARM64ANDshiftRA(v *Value, config *Config) bool { 1470 b := v.Block 1471 _ = b 1472 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 1473 // cond: 1474 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 1475 for { 1476 d := v.AuxInt 1477 v_0 := v.Args[0] 1478 if v_0.Op != OpARM64MOVDconst { 1479 break 1480 } 1481 c := v_0.AuxInt 1482 x := v.Args[1] 1483 v.reset(OpARM64ANDconst) 1484 v.AuxInt = c 1485 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1486 v0.AuxInt = d 1487 v0.AddArg(x) 1488 v.AddArg(v0) 1489 return true 1490 } 1491 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 1492 // cond: 1493 // result: (ANDconst x [int64(int64(c)>>uint64(d))]) 1494 for { 1495 d := v.AuxInt 1496 x := v.Args[0] 1497 v_1 := v.Args[1] 1498 if v_1.Op != OpARM64MOVDconst { 1499 break 1500 } 1501 c := v_1.AuxInt 1502 v.reset(OpARM64ANDconst) 1503 v.AuxInt = int64(int64(c) >> uint64(d)) 1504 v.AddArg(x) 1505 return true 1506 } 1507 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 1508 // cond: c==d 1509 // result: y 1510 for { 1511 d := v.AuxInt 1512 x := v.Args[0] 1513 y := v.Args[1] 1514 if y.Op != OpARM64SRAconst { 1515 break 1516 } 1517 c := y.AuxInt 1518 if x != y.Args[0] { 1519 break 1520 } 1521 if !(c == d) { 1522 break 1523 } 1524 v.reset(OpCopy) 1525 v.Type = y.Type 1526 v.AddArg(y) 1527 return true 1528 } 1529 return false 1530 } 1531 func rewriteValueARM64_OpARM64ANDshiftRL(v *Value, config *Config) bool { 1532 b := v.Block 1533 _ = b 1534 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 1535 // cond: 1536 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 1537 for { 1538 d := v.AuxInt 1539 v_0 := v.Args[0] 1540 if v_0.Op != OpARM64MOVDconst { 1541 break 1542 } 1543 c := v_0.AuxInt 1544 x := v.Args[1] 1545 v.reset(OpARM64ANDconst) 1546 v.AuxInt = c 1547 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1548 v0.AuxInt = d 1549 v0.AddArg(x) 1550 v.AddArg(v0) 1551 return true 1552 } 1553 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 1554 // cond: 1555 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 1556 for { 1557 d := v.AuxInt 1558 x := v.Args[0] 1559 v_1 := v.Args[1] 1560 if v_1.Op != OpARM64MOVDconst { 1561 break 1562 } 1563 c := v_1.AuxInt 1564 v.reset(OpARM64ANDconst) 1565 v.AuxInt = int64(uint64(c) >> uint64(d)) 1566 v.AddArg(x) 1567 return true 1568 } 1569 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 1570 // cond: c==d 1571 // result: y 1572 for { 1573 d := v.AuxInt 1574 x := v.Args[0] 1575 y := v.Args[1] 1576 if y.Op != OpARM64SRLconst { 1577 break 1578 } 1579 c := y.AuxInt 1580 if x != y.Args[0] { 1581 break 1582 } 1583 if !(c == d) { 1584 break 1585 } 1586 v.reset(OpCopy) 1587 v.Type = y.Type 1588 v.AddArg(y) 1589 return true 1590 } 1591 return false 1592 } 1593 func rewriteValueARM64_OpARM64BIC(v *Value, config *Config) bool { 1594 b := v.Block 1595 _ = b 1596 // match: (BIC x (MOVDconst [c])) 1597 // cond: 1598 // result: (BICconst [c] x) 1599 for { 1600 x := v.Args[0] 1601 v_1 := v.Args[1] 1602 if v_1.Op != OpARM64MOVDconst { 1603 break 1604 } 1605 c := v_1.AuxInt 1606 v.reset(OpARM64BICconst) 1607 v.AuxInt = c 1608 v.AddArg(x) 1609 return true 1610 } 1611 // match: (BIC x x) 1612 // cond: 1613 // result: (MOVDconst [0]) 1614 for { 1615 x := v.Args[0] 1616 if x != v.Args[1] { 1617 break 1618 } 1619 v.reset(OpARM64MOVDconst) 1620 v.AuxInt = 0 1621 return true 1622 } 1623 // match: (BIC x (SLLconst [c] y)) 1624 // cond: 1625 // result: (BICshiftLL x y [c]) 1626 for { 1627 x := v.Args[0] 1628 v_1 := v.Args[1] 1629 if v_1.Op != OpARM64SLLconst { 1630 break 1631 } 1632 c := v_1.AuxInt 1633 y := v_1.Args[0] 1634 v.reset(OpARM64BICshiftLL) 1635 v.AuxInt = c 1636 v.AddArg(x) 1637 v.AddArg(y) 1638 return true 1639 } 1640 // match: (BIC x (SRLconst [c] y)) 1641 // cond: 1642 // result: (BICshiftRL x y [c]) 1643 for { 1644 x := v.Args[0] 1645 v_1 := v.Args[1] 1646 if v_1.Op != OpARM64SRLconst { 1647 break 1648 } 1649 c := v_1.AuxInt 1650 y := v_1.Args[0] 1651 v.reset(OpARM64BICshiftRL) 1652 v.AuxInt = c 1653 v.AddArg(x) 1654 v.AddArg(y) 1655 return true 1656 } 1657 // match: (BIC x (SRAconst [c] y)) 1658 // cond: 1659 // result: (BICshiftRA x y [c]) 1660 for { 1661 x := v.Args[0] 1662 v_1 := v.Args[1] 1663 if v_1.Op != OpARM64SRAconst { 1664 break 1665 } 1666 c := v_1.AuxInt 1667 y := v_1.Args[0] 1668 v.reset(OpARM64BICshiftRA) 1669 v.AuxInt = c 1670 v.AddArg(x) 1671 v.AddArg(y) 1672 return true 1673 } 1674 return false 1675 } 1676 func rewriteValueARM64_OpARM64BICconst(v *Value, config *Config) bool { 1677 b := v.Block 1678 _ = b 1679 // match: (BICconst [0] x) 1680 // cond: 1681 // result: x 1682 for { 1683 if v.AuxInt != 0 { 1684 break 1685 } 1686 x := v.Args[0] 1687 v.reset(OpCopy) 1688 v.Type = x.Type 1689 v.AddArg(x) 1690 return true 1691 } 1692 // match: (BICconst [-1] _) 1693 // cond: 1694 // result: (MOVDconst [0]) 1695 for { 1696 if v.AuxInt != -1 { 1697 break 1698 } 1699 v.reset(OpARM64MOVDconst) 1700 v.AuxInt = 0 1701 return true 1702 } 1703 // match: (BICconst [c] (MOVDconst [d])) 1704 // cond: 1705 // result: (MOVDconst [d&^c]) 1706 for { 1707 c := v.AuxInt 1708 v_0 := v.Args[0] 1709 if v_0.Op != OpARM64MOVDconst { 1710 break 1711 } 1712 d := v_0.AuxInt 1713 v.reset(OpARM64MOVDconst) 1714 v.AuxInt = d &^ c 1715 return true 1716 } 1717 return false 1718 } 1719 func rewriteValueARM64_OpARM64BICshiftLL(v *Value, config *Config) bool { 1720 b := v.Block 1721 _ = b 1722 // match: (BICshiftLL x (MOVDconst [c]) [d]) 1723 // cond: 1724 // result: (BICconst x [int64(uint64(c)<<uint64(d))]) 1725 for { 1726 d := v.AuxInt 1727 x := v.Args[0] 1728 v_1 := v.Args[1] 1729 if v_1.Op != OpARM64MOVDconst { 1730 break 1731 } 1732 c := v_1.AuxInt 1733 v.reset(OpARM64BICconst) 1734 v.AuxInt = int64(uint64(c) << uint64(d)) 1735 v.AddArg(x) 1736 return true 1737 } 1738 // match: (BICshiftLL x (SLLconst x [c]) [d]) 1739 // cond: c==d 1740 // result: (MOVDconst [0]) 1741 for { 1742 d := v.AuxInt 1743 x := v.Args[0] 1744 v_1 := v.Args[1] 1745 if v_1.Op != OpARM64SLLconst { 1746 break 1747 } 1748 c := v_1.AuxInt 1749 if x != v_1.Args[0] { 1750 break 1751 } 1752 if !(c == d) { 1753 break 1754 } 1755 v.reset(OpARM64MOVDconst) 1756 v.AuxInt = 0 1757 return true 1758 } 1759 return false 1760 } 1761 func rewriteValueARM64_OpARM64BICshiftRA(v *Value, config *Config) bool { 1762 b := v.Block 1763 _ = b 1764 // match: (BICshiftRA x (MOVDconst [c]) [d]) 1765 // cond: 1766 // result: (BICconst x [int64(int64(c)>>uint64(d))]) 1767 for { 1768 d := v.AuxInt 1769 x := v.Args[0] 1770 v_1 := v.Args[1] 1771 if v_1.Op != OpARM64MOVDconst { 1772 break 1773 } 1774 c := v_1.AuxInt 1775 v.reset(OpARM64BICconst) 1776 v.AuxInt = int64(int64(c) >> uint64(d)) 1777 v.AddArg(x) 1778 return true 1779 } 1780 // match: (BICshiftRA x (SRAconst x [c]) [d]) 1781 // cond: c==d 1782 // result: (MOVDconst [0]) 1783 for { 1784 d := v.AuxInt 1785 x := v.Args[0] 1786 v_1 := v.Args[1] 1787 if v_1.Op != OpARM64SRAconst { 1788 break 1789 } 1790 c := v_1.AuxInt 1791 if x != v_1.Args[0] { 1792 break 1793 } 1794 if !(c == d) { 1795 break 1796 } 1797 v.reset(OpARM64MOVDconst) 1798 v.AuxInt = 0 1799 return true 1800 } 1801 return false 1802 } 1803 func rewriteValueARM64_OpARM64BICshiftRL(v *Value, config *Config) bool { 1804 b := v.Block 1805 _ = b 1806 // match: (BICshiftRL x (MOVDconst [c]) [d]) 1807 // cond: 1808 // result: (BICconst x [int64(uint64(c)>>uint64(d))]) 1809 for { 1810 d := v.AuxInt 1811 x := v.Args[0] 1812 v_1 := v.Args[1] 1813 if v_1.Op != OpARM64MOVDconst { 1814 break 1815 } 1816 c := v_1.AuxInt 1817 v.reset(OpARM64BICconst) 1818 v.AuxInt = int64(uint64(c) >> uint64(d)) 1819 v.AddArg(x) 1820 return true 1821 } 1822 // match: (BICshiftRL x (SRLconst x [c]) [d]) 1823 // cond: c==d 1824 // result: (MOVDconst [0]) 1825 for { 1826 d := v.AuxInt 1827 x := v.Args[0] 1828 v_1 := v.Args[1] 1829 if v_1.Op != OpARM64SRLconst { 1830 break 1831 } 1832 c := v_1.AuxInt 1833 if x != v_1.Args[0] { 1834 break 1835 } 1836 if !(c == d) { 1837 break 1838 } 1839 v.reset(OpARM64MOVDconst) 1840 v.AuxInt = 0 1841 return true 1842 } 1843 return false 1844 } 1845 func rewriteValueARM64_OpARM64CMP(v *Value, config *Config) bool { 1846 b := v.Block 1847 _ = b 1848 // match: (CMP x (MOVDconst [c])) 1849 // cond: 1850 // result: (CMPconst [c] x) 1851 for { 1852 x := v.Args[0] 1853 v_1 := v.Args[1] 1854 if v_1.Op != OpARM64MOVDconst { 1855 break 1856 } 1857 c := v_1.AuxInt 1858 v.reset(OpARM64CMPconst) 1859 v.AuxInt = c 1860 v.AddArg(x) 1861 return true 1862 } 1863 // match: (CMP (MOVDconst [c]) x) 1864 // cond: 1865 // result: (InvertFlags (CMPconst [c] x)) 1866 for { 1867 v_0 := v.Args[0] 1868 if v_0.Op != OpARM64MOVDconst { 1869 break 1870 } 1871 c := v_0.AuxInt 1872 x := v.Args[1] 1873 v.reset(OpARM64InvertFlags) 1874 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 1875 v0.AuxInt = c 1876 v0.AddArg(x) 1877 v.AddArg(v0) 1878 return true 1879 } 1880 // match: (CMP x (SLLconst [c] y)) 1881 // cond: 1882 // result: (CMPshiftLL x y [c]) 1883 for { 1884 x := v.Args[0] 1885 v_1 := v.Args[1] 1886 if v_1.Op != OpARM64SLLconst { 1887 break 1888 } 1889 c := v_1.AuxInt 1890 y := v_1.Args[0] 1891 v.reset(OpARM64CMPshiftLL) 1892 v.AuxInt = c 1893 v.AddArg(x) 1894 v.AddArg(y) 1895 return true 1896 } 1897 // match: (CMP (SLLconst [c] y) x) 1898 // cond: 1899 // result: (InvertFlags (CMPshiftLL x y [c])) 1900 for { 1901 v_0 := v.Args[0] 1902 if v_0.Op != OpARM64SLLconst { 1903 break 1904 } 1905 c := v_0.AuxInt 1906 y := v_0.Args[0] 1907 x := v.Args[1] 1908 v.reset(OpARM64InvertFlags) 1909 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, TypeFlags) 1910 v0.AuxInt = c 1911 v0.AddArg(x) 1912 v0.AddArg(y) 1913 v.AddArg(v0) 1914 return true 1915 } 1916 // match: (CMP x (SRLconst [c] y)) 1917 // cond: 1918 // result: (CMPshiftRL x y [c]) 1919 for { 1920 x := v.Args[0] 1921 v_1 := v.Args[1] 1922 if v_1.Op != OpARM64SRLconst { 1923 break 1924 } 1925 c := v_1.AuxInt 1926 y := v_1.Args[0] 1927 v.reset(OpARM64CMPshiftRL) 1928 v.AuxInt = c 1929 v.AddArg(x) 1930 v.AddArg(y) 1931 return true 1932 } 1933 // match: (CMP (SRLconst [c] y) x) 1934 // cond: 1935 // result: (InvertFlags (CMPshiftRL x y [c])) 1936 for { 1937 v_0 := v.Args[0] 1938 if v_0.Op != OpARM64SRLconst { 1939 break 1940 } 1941 c := v_0.AuxInt 1942 y := v_0.Args[0] 1943 x := v.Args[1] 1944 v.reset(OpARM64InvertFlags) 1945 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, TypeFlags) 1946 v0.AuxInt = c 1947 v0.AddArg(x) 1948 v0.AddArg(y) 1949 v.AddArg(v0) 1950 return true 1951 } 1952 // match: (CMP x (SRAconst [c] y)) 1953 // cond: 1954 // result: (CMPshiftRA x y [c]) 1955 for { 1956 x := v.Args[0] 1957 v_1 := v.Args[1] 1958 if v_1.Op != OpARM64SRAconst { 1959 break 1960 } 1961 c := v_1.AuxInt 1962 y := v_1.Args[0] 1963 v.reset(OpARM64CMPshiftRA) 1964 v.AuxInt = c 1965 v.AddArg(x) 1966 v.AddArg(y) 1967 return true 1968 } 1969 // match: (CMP (SRAconst [c] y) x) 1970 // cond: 1971 // result: (InvertFlags (CMPshiftRA x y [c])) 1972 for { 1973 v_0 := v.Args[0] 1974 if v_0.Op != OpARM64SRAconst { 1975 break 1976 } 1977 c := v_0.AuxInt 1978 y := v_0.Args[0] 1979 x := v.Args[1] 1980 v.reset(OpARM64InvertFlags) 1981 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, TypeFlags) 1982 v0.AuxInt = c 1983 v0.AddArg(x) 1984 v0.AddArg(y) 1985 v.AddArg(v0) 1986 return true 1987 } 1988 return false 1989 } 1990 func rewriteValueARM64_OpARM64CMPW(v *Value, config *Config) bool { 1991 b := v.Block 1992 _ = b 1993 // match: (CMPW x (MOVDconst [c])) 1994 // cond: 1995 // result: (CMPWconst [int64(int32(c))] x) 1996 for { 1997 x := v.Args[0] 1998 v_1 := v.Args[1] 1999 if v_1.Op != OpARM64MOVDconst { 2000 break 2001 } 2002 c := v_1.AuxInt 2003 v.reset(OpARM64CMPWconst) 2004 v.AuxInt = int64(int32(c)) 2005 v.AddArg(x) 2006 return true 2007 } 2008 // match: (CMPW (MOVDconst [c]) x) 2009 // cond: 2010 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 2011 for { 2012 v_0 := v.Args[0] 2013 if v_0.Op != OpARM64MOVDconst { 2014 break 2015 } 2016 c := v_0.AuxInt 2017 x := v.Args[1] 2018 v.reset(OpARM64InvertFlags) 2019 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, TypeFlags) 2020 v0.AuxInt = int64(int32(c)) 2021 v0.AddArg(x) 2022 v.AddArg(v0) 2023 return true 2024 } 2025 return false 2026 } 2027 func rewriteValueARM64_OpARM64CMPWconst(v *Value, config *Config) bool { 2028 b := v.Block 2029 _ = b 2030 // match: (CMPWconst (MOVDconst [x]) [y]) 2031 // cond: int32(x)==int32(y) 2032 // result: (FlagEQ) 2033 for { 2034 y := v.AuxInt 2035 v_0 := v.Args[0] 2036 if v_0.Op != OpARM64MOVDconst { 2037 break 2038 } 2039 x := v_0.AuxInt 2040 if !(int32(x) == int32(y)) { 2041 break 2042 } 2043 v.reset(OpARM64FlagEQ) 2044 return true 2045 } 2046 // match: (CMPWconst (MOVDconst [x]) [y]) 2047 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 2048 // result: (FlagLT_ULT) 2049 for { 2050 y := v.AuxInt 2051 v_0 := v.Args[0] 2052 if v_0.Op != OpARM64MOVDconst { 2053 break 2054 } 2055 x := v_0.AuxInt 2056 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 2057 break 2058 } 2059 v.reset(OpARM64FlagLT_ULT) 2060 return true 2061 } 2062 // match: (CMPWconst (MOVDconst [x]) [y]) 2063 // cond: int32(x)<int32(y) && uint32(x)>uint32(y) 2064 // result: (FlagLT_UGT) 2065 for { 2066 y := v.AuxInt 2067 v_0 := v.Args[0] 2068 if v_0.Op != OpARM64MOVDconst { 2069 break 2070 } 2071 x := v_0.AuxInt 2072 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 2073 break 2074 } 2075 v.reset(OpARM64FlagLT_UGT) 2076 return true 2077 } 2078 // match: (CMPWconst (MOVDconst [x]) [y]) 2079 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 2080 // result: (FlagGT_ULT) 2081 for { 2082 y := v.AuxInt 2083 v_0 := v.Args[0] 2084 if v_0.Op != OpARM64MOVDconst { 2085 break 2086 } 2087 x := v_0.AuxInt 2088 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { 2089 break 2090 } 2091 v.reset(OpARM64FlagGT_ULT) 2092 return true 2093 } 2094 // match: (CMPWconst (MOVDconst [x]) [y]) 2095 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 2096 // result: (FlagGT_UGT) 2097 for { 2098 y := v.AuxInt 2099 v_0 := v.Args[0] 2100 if v_0.Op != OpARM64MOVDconst { 2101 break 2102 } 2103 x := v_0.AuxInt 2104 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { 2105 break 2106 } 2107 v.reset(OpARM64FlagGT_UGT) 2108 return true 2109 } 2110 // match: (CMPWconst (MOVBUreg _) [c]) 2111 // cond: 0xff < int32(c) 2112 // result: (FlagLT_ULT) 2113 for { 2114 c := v.AuxInt 2115 v_0 := v.Args[0] 2116 if v_0.Op != OpARM64MOVBUreg { 2117 break 2118 } 2119 if !(0xff < int32(c)) { 2120 break 2121 } 2122 v.reset(OpARM64FlagLT_ULT) 2123 return true 2124 } 2125 // match: (CMPWconst (MOVHUreg _) [c]) 2126 // cond: 0xffff < int32(c) 2127 // result: (FlagLT_ULT) 2128 for { 2129 c := v.AuxInt 2130 v_0 := v.Args[0] 2131 if v_0.Op != OpARM64MOVHUreg { 2132 break 2133 } 2134 if !(0xffff < int32(c)) { 2135 break 2136 } 2137 v.reset(OpARM64FlagLT_ULT) 2138 return true 2139 } 2140 return false 2141 } 2142 func rewriteValueARM64_OpARM64CMPconst(v *Value, config *Config) bool { 2143 b := v.Block 2144 _ = b 2145 // match: (CMPconst (MOVDconst [x]) [y]) 2146 // cond: x==y 2147 // result: (FlagEQ) 2148 for { 2149 y := v.AuxInt 2150 v_0 := v.Args[0] 2151 if v_0.Op != OpARM64MOVDconst { 2152 break 2153 } 2154 x := v_0.AuxInt 2155 if !(x == y) { 2156 break 2157 } 2158 v.reset(OpARM64FlagEQ) 2159 return true 2160 } 2161 // match: (CMPconst (MOVDconst [x]) [y]) 2162 // cond: int64(x)<int64(y) && uint64(x)<uint64(y) 2163 // result: (FlagLT_ULT) 2164 for { 2165 y := v.AuxInt 2166 v_0 := v.Args[0] 2167 if v_0.Op != OpARM64MOVDconst { 2168 break 2169 } 2170 x := v_0.AuxInt 2171 if !(int64(x) < int64(y) && uint64(x) < uint64(y)) { 2172 break 2173 } 2174 v.reset(OpARM64FlagLT_ULT) 2175 return true 2176 } 2177 // match: (CMPconst (MOVDconst [x]) [y]) 2178 // cond: int64(x)<int64(y) && uint64(x)>uint64(y) 2179 // result: (FlagLT_UGT) 2180 for { 2181 y := v.AuxInt 2182 v_0 := v.Args[0] 2183 if v_0.Op != OpARM64MOVDconst { 2184 break 2185 } 2186 x := v_0.AuxInt 2187 if !(int64(x) < int64(y) && uint64(x) > uint64(y)) { 2188 break 2189 } 2190 v.reset(OpARM64FlagLT_UGT) 2191 return true 2192 } 2193 // match: (CMPconst (MOVDconst [x]) [y]) 2194 // cond: int64(x)>int64(y) && uint64(x)<uint64(y) 2195 // result: (FlagGT_ULT) 2196 for { 2197 y := v.AuxInt 2198 v_0 := v.Args[0] 2199 if v_0.Op != OpARM64MOVDconst { 2200 break 2201 } 2202 x := v_0.AuxInt 2203 if !(int64(x) > int64(y) && uint64(x) < uint64(y)) { 2204 break 2205 } 2206 v.reset(OpARM64FlagGT_ULT) 2207 return true 2208 } 2209 // match: (CMPconst (MOVDconst [x]) [y]) 2210 // cond: int64(x)>int64(y) && uint64(x)>uint64(y) 2211 // result: (FlagGT_UGT) 2212 for { 2213 y := v.AuxInt 2214 v_0 := v.Args[0] 2215 if v_0.Op != OpARM64MOVDconst { 2216 break 2217 } 2218 x := v_0.AuxInt 2219 if !(int64(x) > int64(y) && uint64(x) > uint64(y)) { 2220 break 2221 } 2222 v.reset(OpARM64FlagGT_UGT) 2223 return true 2224 } 2225 // match: (CMPconst (MOVBUreg _) [c]) 2226 // cond: 0xff < c 2227 // result: (FlagLT_ULT) 2228 for { 2229 c := v.AuxInt 2230 v_0 := v.Args[0] 2231 if v_0.Op != OpARM64MOVBUreg { 2232 break 2233 } 2234 if !(0xff < c) { 2235 break 2236 } 2237 v.reset(OpARM64FlagLT_ULT) 2238 return true 2239 } 2240 // match: (CMPconst (MOVHUreg _) [c]) 2241 // cond: 0xffff < c 2242 // result: (FlagLT_ULT) 2243 for { 2244 c := v.AuxInt 2245 v_0 := v.Args[0] 2246 if v_0.Op != OpARM64MOVHUreg { 2247 break 2248 } 2249 if !(0xffff < c) { 2250 break 2251 } 2252 v.reset(OpARM64FlagLT_ULT) 2253 return true 2254 } 2255 // match: (CMPconst (MOVWUreg _) [c]) 2256 // cond: 0xffffffff < c 2257 // result: (FlagLT_ULT) 2258 for { 2259 c := v.AuxInt 2260 v_0 := v.Args[0] 2261 if v_0.Op != OpARM64MOVWUreg { 2262 break 2263 } 2264 if !(0xffffffff < c) { 2265 break 2266 } 2267 v.reset(OpARM64FlagLT_ULT) 2268 return true 2269 } 2270 // match: (CMPconst (ANDconst _ [m]) [n]) 2271 // cond: 0 <= m && m < n 2272 // result: (FlagLT_ULT) 2273 for { 2274 n := v.AuxInt 2275 v_0 := v.Args[0] 2276 if v_0.Op != OpARM64ANDconst { 2277 break 2278 } 2279 m := v_0.AuxInt 2280 if !(0 <= m && m < n) { 2281 break 2282 } 2283 v.reset(OpARM64FlagLT_ULT) 2284 return true 2285 } 2286 // match: (CMPconst (SRLconst _ [c]) [n]) 2287 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 2288 // result: (FlagLT_ULT) 2289 for { 2290 n := v.AuxInt 2291 v_0 := v.Args[0] 2292 if v_0.Op != OpARM64SRLconst { 2293 break 2294 } 2295 c := v_0.AuxInt 2296 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 2297 break 2298 } 2299 v.reset(OpARM64FlagLT_ULT) 2300 return true 2301 } 2302 return false 2303 } 2304 func rewriteValueARM64_OpARM64CMPshiftLL(v *Value, config *Config) bool { 2305 b := v.Block 2306 _ = b 2307 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 2308 // cond: 2309 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 2310 for { 2311 d := v.AuxInt 2312 v_0 := v.Args[0] 2313 if v_0.Op != OpARM64MOVDconst { 2314 break 2315 } 2316 c := v_0.AuxInt 2317 x := v.Args[1] 2318 v.reset(OpARM64InvertFlags) 2319 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2320 v0.AuxInt = c 2321 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2322 v1.AuxInt = d 2323 v1.AddArg(x) 2324 v0.AddArg(v1) 2325 v.AddArg(v0) 2326 return true 2327 } 2328 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 2329 // cond: 2330 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 2331 for { 2332 d := v.AuxInt 2333 x := v.Args[0] 2334 v_1 := v.Args[1] 2335 if v_1.Op != OpARM64MOVDconst { 2336 break 2337 } 2338 c := v_1.AuxInt 2339 v.reset(OpARM64CMPconst) 2340 v.AuxInt = int64(uint64(c) << uint64(d)) 2341 v.AddArg(x) 2342 return true 2343 } 2344 return false 2345 } 2346 func rewriteValueARM64_OpARM64CMPshiftRA(v *Value, config *Config) bool { 2347 b := v.Block 2348 _ = b 2349 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 2350 // cond: 2351 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 2352 for { 2353 d := v.AuxInt 2354 v_0 := v.Args[0] 2355 if v_0.Op != OpARM64MOVDconst { 2356 break 2357 } 2358 c := v_0.AuxInt 2359 x := v.Args[1] 2360 v.reset(OpARM64InvertFlags) 2361 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2362 v0.AuxInt = c 2363 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2364 v1.AuxInt = d 2365 v1.AddArg(x) 2366 v0.AddArg(v1) 2367 v.AddArg(v0) 2368 return true 2369 } 2370 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 2371 // cond: 2372 // result: (CMPconst x [int64(int64(c)>>uint64(d))]) 2373 for { 2374 d := v.AuxInt 2375 x := v.Args[0] 2376 v_1 := v.Args[1] 2377 if v_1.Op != OpARM64MOVDconst { 2378 break 2379 } 2380 c := v_1.AuxInt 2381 v.reset(OpARM64CMPconst) 2382 v.AuxInt = int64(int64(c) >> uint64(d)) 2383 v.AddArg(x) 2384 return true 2385 } 2386 return false 2387 } 2388 func rewriteValueARM64_OpARM64CMPshiftRL(v *Value, config *Config) bool { 2389 b := v.Block 2390 _ = b 2391 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 2392 // cond: 2393 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 2394 for { 2395 d := v.AuxInt 2396 v_0 := v.Args[0] 2397 if v_0.Op != OpARM64MOVDconst { 2398 break 2399 } 2400 c := v_0.AuxInt 2401 x := v.Args[1] 2402 v.reset(OpARM64InvertFlags) 2403 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2404 v0.AuxInt = c 2405 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2406 v1.AuxInt = d 2407 v1.AddArg(x) 2408 v0.AddArg(v1) 2409 v.AddArg(v0) 2410 return true 2411 } 2412 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 2413 // cond: 2414 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 2415 for { 2416 d := v.AuxInt 2417 x := v.Args[0] 2418 v_1 := v.Args[1] 2419 if v_1.Op != OpARM64MOVDconst { 2420 break 2421 } 2422 c := v_1.AuxInt 2423 v.reset(OpARM64CMPconst) 2424 v.AuxInt = int64(uint64(c) >> uint64(d)) 2425 v.AddArg(x) 2426 return true 2427 } 2428 return false 2429 } 2430 func rewriteValueARM64_OpARM64CSELULT(v *Value, config *Config) bool { 2431 b := v.Block 2432 _ = b 2433 // match: (CSELULT x (MOVDconst [0]) flag) 2434 // cond: 2435 // result: (CSELULT0 x flag) 2436 for { 2437 x := v.Args[0] 2438 v_1 := v.Args[1] 2439 if v_1.Op != OpARM64MOVDconst { 2440 break 2441 } 2442 if v_1.AuxInt != 0 { 2443 break 2444 } 2445 flag := v.Args[2] 2446 v.reset(OpARM64CSELULT0) 2447 v.AddArg(x) 2448 v.AddArg(flag) 2449 return true 2450 } 2451 // match: (CSELULT _ y (FlagEQ)) 2452 // cond: 2453 // result: y 2454 for { 2455 y := v.Args[1] 2456 v_2 := v.Args[2] 2457 if v_2.Op != OpARM64FlagEQ { 2458 break 2459 } 2460 v.reset(OpCopy) 2461 v.Type = y.Type 2462 v.AddArg(y) 2463 return true 2464 } 2465 // match: (CSELULT x _ (FlagLT_ULT)) 2466 // cond: 2467 // result: x 2468 for { 2469 x := v.Args[0] 2470 v_2 := v.Args[2] 2471 if v_2.Op != OpARM64FlagLT_ULT { 2472 break 2473 } 2474 v.reset(OpCopy) 2475 v.Type = x.Type 2476 v.AddArg(x) 2477 return true 2478 } 2479 // match: (CSELULT _ y (FlagLT_UGT)) 2480 // cond: 2481 // result: y 2482 for { 2483 y := v.Args[1] 2484 v_2 := v.Args[2] 2485 if v_2.Op != OpARM64FlagLT_UGT { 2486 break 2487 } 2488 v.reset(OpCopy) 2489 v.Type = y.Type 2490 v.AddArg(y) 2491 return true 2492 } 2493 // match: (CSELULT x _ (FlagGT_ULT)) 2494 // cond: 2495 // result: x 2496 for { 2497 x := v.Args[0] 2498 v_2 := v.Args[2] 2499 if v_2.Op != OpARM64FlagGT_ULT { 2500 break 2501 } 2502 v.reset(OpCopy) 2503 v.Type = x.Type 2504 v.AddArg(x) 2505 return true 2506 } 2507 // match: (CSELULT _ y (FlagGT_UGT)) 2508 // cond: 2509 // result: y 2510 for { 2511 y := v.Args[1] 2512 v_2 := v.Args[2] 2513 if v_2.Op != OpARM64FlagGT_UGT { 2514 break 2515 } 2516 v.reset(OpCopy) 2517 v.Type = y.Type 2518 v.AddArg(y) 2519 return true 2520 } 2521 return false 2522 } 2523 func rewriteValueARM64_OpARM64CSELULT0(v *Value, config *Config) bool { 2524 b := v.Block 2525 _ = b 2526 // match: (CSELULT0 _ (FlagEQ)) 2527 // cond: 2528 // result: (MOVDconst [0]) 2529 for { 2530 v_1 := v.Args[1] 2531 if v_1.Op != OpARM64FlagEQ { 2532 break 2533 } 2534 v.reset(OpARM64MOVDconst) 2535 v.AuxInt = 0 2536 return true 2537 } 2538 // match: (CSELULT0 x (FlagLT_ULT)) 2539 // cond: 2540 // result: x 2541 for { 2542 x := v.Args[0] 2543 v_1 := v.Args[1] 2544 if v_1.Op != OpARM64FlagLT_ULT { 2545 break 2546 } 2547 v.reset(OpCopy) 2548 v.Type = x.Type 2549 v.AddArg(x) 2550 return true 2551 } 2552 // match: (CSELULT0 _ (FlagLT_UGT)) 2553 // cond: 2554 // result: (MOVDconst [0]) 2555 for { 2556 v_1 := v.Args[1] 2557 if v_1.Op != OpARM64FlagLT_UGT { 2558 break 2559 } 2560 v.reset(OpARM64MOVDconst) 2561 v.AuxInt = 0 2562 return true 2563 } 2564 // match: (CSELULT0 x (FlagGT_ULT)) 2565 // cond: 2566 // result: x 2567 for { 2568 x := v.Args[0] 2569 v_1 := v.Args[1] 2570 if v_1.Op != OpARM64FlagGT_ULT { 2571 break 2572 } 2573 v.reset(OpCopy) 2574 v.Type = x.Type 2575 v.AddArg(x) 2576 return true 2577 } 2578 // match: (CSELULT0 _ (FlagGT_UGT)) 2579 // cond: 2580 // result: (MOVDconst [0]) 2581 for { 2582 v_1 := v.Args[1] 2583 if v_1.Op != OpARM64FlagGT_UGT { 2584 break 2585 } 2586 v.reset(OpARM64MOVDconst) 2587 v.AuxInt = 0 2588 return true 2589 } 2590 return false 2591 } 2592 func rewriteValueARM64_OpARM64DIV(v *Value, config *Config) bool { 2593 b := v.Block 2594 _ = b 2595 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 2596 // cond: 2597 // result: (MOVDconst [int64(c)/int64(d)]) 2598 for { 2599 v_0 := v.Args[0] 2600 if v_0.Op != OpARM64MOVDconst { 2601 break 2602 } 2603 c := v_0.AuxInt 2604 v_1 := v.Args[1] 2605 if v_1.Op != OpARM64MOVDconst { 2606 break 2607 } 2608 d := v_1.AuxInt 2609 v.reset(OpARM64MOVDconst) 2610 v.AuxInt = int64(c) / int64(d) 2611 return true 2612 } 2613 return false 2614 } 2615 func rewriteValueARM64_OpARM64DIVW(v *Value, config *Config) bool { 2616 b := v.Block 2617 _ = b 2618 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 2619 // cond: 2620 // result: (MOVDconst [int64(int32(c)/int32(d))]) 2621 for { 2622 v_0 := v.Args[0] 2623 if v_0.Op != OpARM64MOVDconst { 2624 break 2625 } 2626 c := v_0.AuxInt 2627 v_1 := v.Args[1] 2628 if v_1.Op != OpARM64MOVDconst { 2629 break 2630 } 2631 d := v_1.AuxInt 2632 v.reset(OpARM64MOVDconst) 2633 v.AuxInt = int64(int32(c) / int32(d)) 2634 return true 2635 } 2636 return false 2637 } 2638 func rewriteValueARM64_OpARM64Equal(v *Value, config *Config) bool { 2639 b := v.Block 2640 _ = b 2641 // match: (Equal (FlagEQ)) 2642 // cond: 2643 // result: (MOVDconst [1]) 2644 for { 2645 v_0 := v.Args[0] 2646 if v_0.Op != OpARM64FlagEQ { 2647 break 2648 } 2649 v.reset(OpARM64MOVDconst) 2650 v.AuxInt = 1 2651 return true 2652 } 2653 // match: (Equal (FlagLT_ULT)) 2654 // cond: 2655 // result: (MOVDconst [0]) 2656 for { 2657 v_0 := v.Args[0] 2658 if v_0.Op != OpARM64FlagLT_ULT { 2659 break 2660 } 2661 v.reset(OpARM64MOVDconst) 2662 v.AuxInt = 0 2663 return true 2664 } 2665 // match: (Equal (FlagLT_UGT)) 2666 // cond: 2667 // result: (MOVDconst [0]) 2668 for { 2669 v_0 := v.Args[0] 2670 if v_0.Op != OpARM64FlagLT_UGT { 2671 break 2672 } 2673 v.reset(OpARM64MOVDconst) 2674 v.AuxInt = 0 2675 return true 2676 } 2677 // match: (Equal (FlagGT_ULT)) 2678 // cond: 2679 // result: (MOVDconst [0]) 2680 for { 2681 v_0 := v.Args[0] 2682 if v_0.Op != OpARM64FlagGT_ULT { 2683 break 2684 } 2685 v.reset(OpARM64MOVDconst) 2686 v.AuxInt = 0 2687 return true 2688 } 2689 // match: (Equal (FlagGT_UGT)) 2690 // cond: 2691 // result: (MOVDconst [0]) 2692 for { 2693 v_0 := v.Args[0] 2694 if v_0.Op != OpARM64FlagGT_UGT { 2695 break 2696 } 2697 v.reset(OpARM64MOVDconst) 2698 v.AuxInt = 0 2699 return true 2700 } 2701 // match: (Equal (InvertFlags x)) 2702 // cond: 2703 // result: (Equal x) 2704 for { 2705 v_0 := v.Args[0] 2706 if v_0.Op != OpARM64InvertFlags { 2707 break 2708 } 2709 x := v_0.Args[0] 2710 v.reset(OpARM64Equal) 2711 v.AddArg(x) 2712 return true 2713 } 2714 return false 2715 } 2716 func rewriteValueARM64_OpARM64FMOVDload(v *Value, config *Config) bool { 2717 b := v.Block 2718 _ = b 2719 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 2720 // cond: is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 2721 // result: (FMOVDload [off1+off2] {sym} ptr mem) 2722 for { 2723 off1 := v.AuxInt 2724 sym := v.Aux 2725 v_0 := v.Args[0] 2726 if v_0.Op != OpARM64ADDconst { 2727 break 2728 } 2729 off2 := v_0.AuxInt 2730 ptr := v_0.Args[0] 2731 mem := v.Args[1] 2732 if !(is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 2733 break 2734 } 2735 v.reset(OpARM64FMOVDload) 2736 v.AuxInt = off1 + off2 2737 v.Aux = sym 2738 v.AddArg(ptr) 2739 v.AddArg(mem) 2740 return true 2741 } 2742 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2743 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 2744 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2745 for { 2746 off1 := v.AuxInt 2747 sym1 := v.Aux 2748 v_0 := v.Args[0] 2749 if v_0.Op != OpARM64MOVDaddr { 2750 break 2751 } 2752 off2 := v_0.AuxInt 2753 sym2 := v_0.Aux 2754 ptr := v_0.Args[0] 2755 mem := v.Args[1] 2756 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 2757 break 2758 } 2759 v.reset(OpARM64FMOVDload) 2760 v.AuxInt = off1 + off2 2761 v.Aux = mergeSym(sym1, sym2) 2762 v.AddArg(ptr) 2763 v.AddArg(mem) 2764 return true 2765 } 2766 return false 2767 } 2768 func rewriteValueARM64_OpARM64FMOVDstore(v *Value, config *Config) bool { 2769 b := v.Block 2770 _ = b 2771 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2772 // cond: is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 2773 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 2774 for { 2775 off1 := v.AuxInt 2776 sym := v.Aux 2777 v_0 := v.Args[0] 2778 if v_0.Op != OpARM64ADDconst { 2779 break 2780 } 2781 off2 := v_0.AuxInt 2782 ptr := v_0.Args[0] 2783 val := v.Args[1] 2784 mem := v.Args[2] 2785 if !(is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 2786 break 2787 } 2788 v.reset(OpARM64FMOVDstore) 2789 v.AuxInt = off1 + off2 2790 v.Aux = sym 2791 v.AddArg(ptr) 2792 v.AddArg(val) 2793 v.AddArg(mem) 2794 return true 2795 } 2796 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2797 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 2798 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2799 for { 2800 off1 := v.AuxInt 2801 sym1 := v.Aux 2802 v_0 := v.Args[0] 2803 if v_0.Op != OpARM64MOVDaddr { 2804 break 2805 } 2806 off2 := v_0.AuxInt 2807 sym2 := v_0.Aux 2808 ptr := v_0.Args[0] 2809 val := v.Args[1] 2810 mem := v.Args[2] 2811 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 2812 break 2813 } 2814 v.reset(OpARM64FMOVDstore) 2815 v.AuxInt = off1 + off2 2816 v.Aux = mergeSym(sym1, sym2) 2817 v.AddArg(ptr) 2818 v.AddArg(val) 2819 v.AddArg(mem) 2820 return true 2821 } 2822 return false 2823 } 2824 func rewriteValueARM64_OpARM64FMOVSload(v *Value, config *Config) bool { 2825 b := v.Block 2826 _ = b 2827 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 2828 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 2829 // result: (FMOVSload [off1+off2] {sym} ptr mem) 2830 for { 2831 off1 := v.AuxInt 2832 sym := v.Aux 2833 v_0 := v.Args[0] 2834 if v_0.Op != OpARM64ADDconst { 2835 break 2836 } 2837 off2 := v_0.AuxInt 2838 ptr := v_0.Args[0] 2839 mem := v.Args[1] 2840 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 2841 break 2842 } 2843 v.reset(OpARM64FMOVSload) 2844 v.AuxInt = off1 + off2 2845 v.Aux = sym 2846 v.AddArg(ptr) 2847 v.AddArg(mem) 2848 return true 2849 } 2850 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2851 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 2852 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2853 for { 2854 off1 := v.AuxInt 2855 sym1 := v.Aux 2856 v_0 := v.Args[0] 2857 if v_0.Op != OpARM64MOVDaddr { 2858 break 2859 } 2860 off2 := v_0.AuxInt 2861 sym2 := v_0.Aux 2862 ptr := v_0.Args[0] 2863 mem := v.Args[1] 2864 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 2865 break 2866 } 2867 v.reset(OpARM64FMOVSload) 2868 v.AuxInt = off1 + off2 2869 v.Aux = mergeSym(sym1, sym2) 2870 v.AddArg(ptr) 2871 v.AddArg(mem) 2872 return true 2873 } 2874 return false 2875 } 2876 func rewriteValueARM64_OpARM64FMOVSstore(v *Value, config *Config) bool { 2877 b := v.Block 2878 _ = b 2879 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2880 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 2881 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 2882 for { 2883 off1 := v.AuxInt 2884 sym := v.Aux 2885 v_0 := v.Args[0] 2886 if v_0.Op != OpARM64ADDconst { 2887 break 2888 } 2889 off2 := v_0.AuxInt 2890 ptr := v_0.Args[0] 2891 val := v.Args[1] 2892 mem := v.Args[2] 2893 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 2894 break 2895 } 2896 v.reset(OpARM64FMOVSstore) 2897 v.AuxInt = off1 + off2 2898 v.Aux = sym 2899 v.AddArg(ptr) 2900 v.AddArg(val) 2901 v.AddArg(mem) 2902 return true 2903 } 2904 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2905 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 2906 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2907 for { 2908 off1 := v.AuxInt 2909 sym1 := v.Aux 2910 v_0 := v.Args[0] 2911 if v_0.Op != OpARM64MOVDaddr { 2912 break 2913 } 2914 off2 := v_0.AuxInt 2915 sym2 := v_0.Aux 2916 ptr := v_0.Args[0] 2917 val := v.Args[1] 2918 mem := v.Args[2] 2919 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 2920 break 2921 } 2922 v.reset(OpARM64FMOVSstore) 2923 v.AuxInt = off1 + off2 2924 v.Aux = mergeSym(sym1, sym2) 2925 v.AddArg(ptr) 2926 v.AddArg(val) 2927 v.AddArg(mem) 2928 return true 2929 } 2930 return false 2931 } 2932 func rewriteValueARM64_OpARM64GreaterEqual(v *Value, config *Config) bool { 2933 b := v.Block 2934 _ = b 2935 // match: (GreaterEqual (FlagEQ)) 2936 // cond: 2937 // result: (MOVDconst [1]) 2938 for { 2939 v_0 := v.Args[0] 2940 if v_0.Op != OpARM64FlagEQ { 2941 break 2942 } 2943 v.reset(OpARM64MOVDconst) 2944 v.AuxInt = 1 2945 return true 2946 } 2947 // match: (GreaterEqual (FlagLT_ULT)) 2948 // cond: 2949 // result: (MOVDconst [0]) 2950 for { 2951 v_0 := v.Args[0] 2952 if v_0.Op != OpARM64FlagLT_ULT { 2953 break 2954 } 2955 v.reset(OpARM64MOVDconst) 2956 v.AuxInt = 0 2957 return true 2958 } 2959 // match: (GreaterEqual (FlagLT_UGT)) 2960 // cond: 2961 // result: (MOVDconst [0]) 2962 for { 2963 v_0 := v.Args[0] 2964 if v_0.Op != OpARM64FlagLT_UGT { 2965 break 2966 } 2967 v.reset(OpARM64MOVDconst) 2968 v.AuxInt = 0 2969 return true 2970 } 2971 // match: (GreaterEqual (FlagGT_ULT)) 2972 // cond: 2973 // result: (MOVDconst [1]) 2974 for { 2975 v_0 := v.Args[0] 2976 if v_0.Op != OpARM64FlagGT_ULT { 2977 break 2978 } 2979 v.reset(OpARM64MOVDconst) 2980 v.AuxInt = 1 2981 return true 2982 } 2983 // match: (GreaterEqual (FlagGT_UGT)) 2984 // cond: 2985 // result: (MOVDconst [1]) 2986 for { 2987 v_0 := v.Args[0] 2988 if v_0.Op != OpARM64FlagGT_UGT { 2989 break 2990 } 2991 v.reset(OpARM64MOVDconst) 2992 v.AuxInt = 1 2993 return true 2994 } 2995 // match: (GreaterEqual (InvertFlags x)) 2996 // cond: 2997 // result: (LessEqual x) 2998 for { 2999 v_0 := v.Args[0] 3000 if v_0.Op != OpARM64InvertFlags { 3001 break 3002 } 3003 x := v_0.Args[0] 3004 v.reset(OpARM64LessEqual) 3005 v.AddArg(x) 3006 return true 3007 } 3008 return false 3009 } 3010 func rewriteValueARM64_OpARM64GreaterEqualU(v *Value, config *Config) bool { 3011 b := v.Block 3012 _ = b 3013 // match: (GreaterEqualU (FlagEQ)) 3014 // cond: 3015 // result: (MOVDconst [1]) 3016 for { 3017 v_0 := v.Args[0] 3018 if v_0.Op != OpARM64FlagEQ { 3019 break 3020 } 3021 v.reset(OpARM64MOVDconst) 3022 v.AuxInt = 1 3023 return true 3024 } 3025 // match: (GreaterEqualU (FlagLT_ULT)) 3026 // cond: 3027 // result: (MOVDconst [0]) 3028 for { 3029 v_0 := v.Args[0] 3030 if v_0.Op != OpARM64FlagLT_ULT { 3031 break 3032 } 3033 v.reset(OpARM64MOVDconst) 3034 v.AuxInt = 0 3035 return true 3036 } 3037 // match: (GreaterEqualU (FlagLT_UGT)) 3038 // cond: 3039 // result: (MOVDconst [1]) 3040 for { 3041 v_0 := v.Args[0] 3042 if v_0.Op != OpARM64FlagLT_UGT { 3043 break 3044 } 3045 v.reset(OpARM64MOVDconst) 3046 v.AuxInt = 1 3047 return true 3048 } 3049 // match: (GreaterEqualU (FlagGT_ULT)) 3050 // cond: 3051 // result: (MOVDconst [0]) 3052 for { 3053 v_0 := v.Args[0] 3054 if v_0.Op != OpARM64FlagGT_ULT { 3055 break 3056 } 3057 v.reset(OpARM64MOVDconst) 3058 v.AuxInt = 0 3059 return true 3060 } 3061 // match: (GreaterEqualU (FlagGT_UGT)) 3062 // cond: 3063 // result: (MOVDconst [1]) 3064 for { 3065 v_0 := v.Args[0] 3066 if v_0.Op != OpARM64FlagGT_UGT { 3067 break 3068 } 3069 v.reset(OpARM64MOVDconst) 3070 v.AuxInt = 1 3071 return true 3072 } 3073 // match: (GreaterEqualU (InvertFlags x)) 3074 // cond: 3075 // result: (LessEqualU x) 3076 for { 3077 v_0 := v.Args[0] 3078 if v_0.Op != OpARM64InvertFlags { 3079 break 3080 } 3081 x := v_0.Args[0] 3082 v.reset(OpARM64LessEqualU) 3083 v.AddArg(x) 3084 return true 3085 } 3086 return false 3087 } 3088 func rewriteValueARM64_OpARM64GreaterThan(v *Value, config *Config) bool { 3089 b := v.Block 3090 _ = b 3091 // match: (GreaterThan (FlagEQ)) 3092 // cond: 3093 // result: (MOVDconst [0]) 3094 for { 3095 v_0 := v.Args[0] 3096 if v_0.Op != OpARM64FlagEQ { 3097 break 3098 } 3099 v.reset(OpARM64MOVDconst) 3100 v.AuxInt = 0 3101 return true 3102 } 3103 // match: (GreaterThan (FlagLT_ULT)) 3104 // cond: 3105 // result: (MOVDconst [0]) 3106 for { 3107 v_0 := v.Args[0] 3108 if v_0.Op != OpARM64FlagLT_ULT { 3109 break 3110 } 3111 v.reset(OpARM64MOVDconst) 3112 v.AuxInt = 0 3113 return true 3114 } 3115 // match: (GreaterThan (FlagLT_UGT)) 3116 // cond: 3117 // result: (MOVDconst [0]) 3118 for { 3119 v_0 := v.Args[0] 3120 if v_0.Op != OpARM64FlagLT_UGT { 3121 break 3122 } 3123 v.reset(OpARM64MOVDconst) 3124 v.AuxInt = 0 3125 return true 3126 } 3127 // match: (GreaterThan (FlagGT_ULT)) 3128 // cond: 3129 // result: (MOVDconst [1]) 3130 for { 3131 v_0 := v.Args[0] 3132 if v_0.Op != OpARM64FlagGT_ULT { 3133 break 3134 } 3135 v.reset(OpARM64MOVDconst) 3136 v.AuxInt = 1 3137 return true 3138 } 3139 // match: (GreaterThan (FlagGT_UGT)) 3140 // cond: 3141 // result: (MOVDconst [1]) 3142 for { 3143 v_0 := v.Args[0] 3144 if v_0.Op != OpARM64FlagGT_UGT { 3145 break 3146 } 3147 v.reset(OpARM64MOVDconst) 3148 v.AuxInt = 1 3149 return true 3150 } 3151 // match: (GreaterThan (InvertFlags x)) 3152 // cond: 3153 // result: (LessThan x) 3154 for { 3155 v_0 := v.Args[0] 3156 if v_0.Op != OpARM64InvertFlags { 3157 break 3158 } 3159 x := v_0.Args[0] 3160 v.reset(OpARM64LessThan) 3161 v.AddArg(x) 3162 return true 3163 } 3164 return false 3165 } 3166 func rewriteValueARM64_OpARM64GreaterThanU(v *Value, config *Config) bool { 3167 b := v.Block 3168 _ = b 3169 // match: (GreaterThanU (FlagEQ)) 3170 // cond: 3171 // result: (MOVDconst [0]) 3172 for { 3173 v_0 := v.Args[0] 3174 if v_0.Op != OpARM64FlagEQ { 3175 break 3176 } 3177 v.reset(OpARM64MOVDconst) 3178 v.AuxInt = 0 3179 return true 3180 } 3181 // match: (GreaterThanU (FlagLT_ULT)) 3182 // cond: 3183 // result: (MOVDconst [0]) 3184 for { 3185 v_0 := v.Args[0] 3186 if v_0.Op != OpARM64FlagLT_ULT { 3187 break 3188 } 3189 v.reset(OpARM64MOVDconst) 3190 v.AuxInt = 0 3191 return true 3192 } 3193 // match: (GreaterThanU (FlagLT_UGT)) 3194 // cond: 3195 // result: (MOVDconst [1]) 3196 for { 3197 v_0 := v.Args[0] 3198 if v_0.Op != OpARM64FlagLT_UGT { 3199 break 3200 } 3201 v.reset(OpARM64MOVDconst) 3202 v.AuxInt = 1 3203 return true 3204 } 3205 // match: (GreaterThanU (FlagGT_ULT)) 3206 // cond: 3207 // result: (MOVDconst [0]) 3208 for { 3209 v_0 := v.Args[0] 3210 if v_0.Op != OpARM64FlagGT_ULT { 3211 break 3212 } 3213 v.reset(OpARM64MOVDconst) 3214 v.AuxInt = 0 3215 return true 3216 } 3217 // match: (GreaterThanU (FlagGT_UGT)) 3218 // cond: 3219 // result: (MOVDconst [1]) 3220 for { 3221 v_0 := v.Args[0] 3222 if v_0.Op != OpARM64FlagGT_UGT { 3223 break 3224 } 3225 v.reset(OpARM64MOVDconst) 3226 v.AuxInt = 1 3227 return true 3228 } 3229 // match: (GreaterThanU (InvertFlags x)) 3230 // cond: 3231 // result: (LessThanU x) 3232 for { 3233 v_0 := v.Args[0] 3234 if v_0.Op != OpARM64InvertFlags { 3235 break 3236 } 3237 x := v_0.Args[0] 3238 v.reset(OpARM64LessThanU) 3239 v.AddArg(x) 3240 return true 3241 } 3242 return false 3243 } 3244 func rewriteValueARM64_OpARM64LessEqual(v *Value, config *Config) bool { 3245 b := v.Block 3246 _ = b 3247 // match: (LessEqual (FlagEQ)) 3248 // cond: 3249 // result: (MOVDconst [1]) 3250 for { 3251 v_0 := v.Args[0] 3252 if v_0.Op != OpARM64FlagEQ { 3253 break 3254 } 3255 v.reset(OpARM64MOVDconst) 3256 v.AuxInt = 1 3257 return true 3258 } 3259 // match: (LessEqual (FlagLT_ULT)) 3260 // cond: 3261 // result: (MOVDconst [1]) 3262 for { 3263 v_0 := v.Args[0] 3264 if v_0.Op != OpARM64FlagLT_ULT { 3265 break 3266 } 3267 v.reset(OpARM64MOVDconst) 3268 v.AuxInt = 1 3269 return true 3270 } 3271 // match: (LessEqual (FlagLT_UGT)) 3272 // cond: 3273 // result: (MOVDconst [1]) 3274 for { 3275 v_0 := v.Args[0] 3276 if v_0.Op != OpARM64FlagLT_UGT { 3277 break 3278 } 3279 v.reset(OpARM64MOVDconst) 3280 v.AuxInt = 1 3281 return true 3282 } 3283 // match: (LessEqual (FlagGT_ULT)) 3284 // cond: 3285 // result: (MOVDconst [0]) 3286 for { 3287 v_0 := v.Args[0] 3288 if v_0.Op != OpARM64FlagGT_ULT { 3289 break 3290 } 3291 v.reset(OpARM64MOVDconst) 3292 v.AuxInt = 0 3293 return true 3294 } 3295 // match: (LessEqual (FlagGT_UGT)) 3296 // cond: 3297 // result: (MOVDconst [0]) 3298 for { 3299 v_0 := v.Args[0] 3300 if v_0.Op != OpARM64FlagGT_UGT { 3301 break 3302 } 3303 v.reset(OpARM64MOVDconst) 3304 v.AuxInt = 0 3305 return true 3306 } 3307 // match: (LessEqual (InvertFlags x)) 3308 // cond: 3309 // result: (GreaterEqual x) 3310 for { 3311 v_0 := v.Args[0] 3312 if v_0.Op != OpARM64InvertFlags { 3313 break 3314 } 3315 x := v_0.Args[0] 3316 v.reset(OpARM64GreaterEqual) 3317 v.AddArg(x) 3318 return true 3319 } 3320 return false 3321 } 3322 func rewriteValueARM64_OpARM64LessEqualU(v *Value, config *Config) bool { 3323 b := v.Block 3324 _ = b 3325 // match: (LessEqualU (FlagEQ)) 3326 // cond: 3327 // result: (MOVDconst [1]) 3328 for { 3329 v_0 := v.Args[0] 3330 if v_0.Op != OpARM64FlagEQ { 3331 break 3332 } 3333 v.reset(OpARM64MOVDconst) 3334 v.AuxInt = 1 3335 return true 3336 } 3337 // match: (LessEqualU (FlagLT_ULT)) 3338 // cond: 3339 // result: (MOVDconst [1]) 3340 for { 3341 v_0 := v.Args[0] 3342 if v_0.Op != OpARM64FlagLT_ULT { 3343 break 3344 } 3345 v.reset(OpARM64MOVDconst) 3346 v.AuxInt = 1 3347 return true 3348 } 3349 // match: (LessEqualU (FlagLT_UGT)) 3350 // cond: 3351 // result: (MOVDconst [0]) 3352 for { 3353 v_0 := v.Args[0] 3354 if v_0.Op != OpARM64FlagLT_UGT { 3355 break 3356 } 3357 v.reset(OpARM64MOVDconst) 3358 v.AuxInt = 0 3359 return true 3360 } 3361 // match: (LessEqualU (FlagGT_ULT)) 3362 // cond: 3363 // result: (MOVDconst [1]) 3364 for { 3365 v_0 := v.Args[0] 3366 if v_0.Op != OpARM64FlagGT_ULT { 3367 break 3368 } 3369 v.reset(OpARM64MOVDconst) 3370 v.AuxInt = 1 3371 return true 3372 } 3373 // match: (LessEqualU (FlagGT_UGT)) 3374 // cond: 3375 // result: (MOVDconst [0]) 3376 for { 3377 v_0 := v.Args[0] 3378 if v_0.Op != OpARM64FlagGT_UGT { 3379 break 3380 } 3381 v.reset(OpARM64MOVDconst) 3382 v.AuxInt = 0 3383 return true 3384 } 3385 // match: (LessEqualU (InvertFlags x)) 3386 // cond: 3387 // result: (GreaterEqualU x) 3388 for { 3389 v_0 := v.Args[0] 3390 if v_0.Op != OpARM64InvertFlags { 3391 break 3392 } 3393 x := v_0.Args[0] 3394 v.reset(OpARM64GreaterEqualU) 3395 v.AddArg(x) 3396 return true 3397 } 3398 return false 3399 } 3400 func rewriteValueARM64_OpARM64LessThan(v *Value, config *Config) bool { 3401 b := v.Block 3402 _ = b 3403 // match: (LessThan (FlagEQ)) 3404 // cond: 3405 // result: (MOVDconst [0]) 3406 for { 3407 v_0 := v.Args[0] 3408 if v_0.Op != OpARM64FlagEQ { 3409 break 3410 } 3411 v.reset(OpARM64MOVDconst) 3412 v.AuxInt = 0 3413 return true 3414 } 3415 // match: (LessThan (FlagLT_ULT)) 3416 // cond: 3417 // result: (MOVDconst [1]) 3418 for { 3419 v_0 := v.Args[0] 3420 if v_0.Op != OpARM64FlagLT_ULT { 3421 break 3422 } 3423 v.reset(OpARM64MOVDconst) 3424 v.AuxInt = 1 3425 return true 3426 } 3427 // match: (LessThan (FlagLT_UGT)) 3428 // cond: 3429 // result: (MOVDconst [1]) 3430 for { 3431 v_0 := v.Args[0] 3432 if v_0.Op != OpARM64FlagLT_UGT { 3433 break 3434 } 3435 v.reset(OpARM64MOVDconst) 3436 v.AuxInt = 1 3437 return true 3438 } 3439 // match: (LessThan (FlagGT_ULT)) 3440 // cond: 3441 // result: (MOVDconst [0]) 3442 for { 3443 v_0 := v.Args[0] 3444 if v_0.Op != OpARM64FlagGT_ULT { 3445 break 3446 } 3447 v.reset(OpARM64MOVDconst) 3448 v.AuxInt = 0 3449 return true 3450 } 3451 // match: (LessThan (FlagGT_UGT)) 3452 // cond: 3453 // result: (MOVDconst [0]) 3454 for { 3455 v_0 := v.Args[0] 3456 if v_0.Op != OpARM64FlagGT_UGT { 3457 break 3458 } 3459 v.reset(OpARM64MOVDconst) 3460 v.AuxInt = 0 3461 return true 3462 } 3463 // match: (LessThan (InvertFlags x)) 3464 // cond: 3465 // result: (GreaterThan x) 3466 for { 3467 v_0 := v.Args[0] 3468 if v_0.Op != OpARM64InvertFlags { 3469 break 3470 } 3471 x := v_0.Args[0] 3472 v.reset(OpARM64GreaterThan) 3473 v.AddArg(x) 3474 return true 3475 } 3476 return false 3477 } 3478 func rewriteValueARM64_OpARM64LessThanU(v *Value, config *Config) bool { 3479 b := v.Block 3480 _ = b 3481 // match: (LessThanU (FlagEQ)) 3482 // cond: 3483 // result: (MOVDconst [0]) 3484 for { 3485 v_0 := v.Args[0] 3486 if v_0.Op != OpARM64FlagEQ { 3487 break 3488 } 3489 v.reset(OpARM64MOVDconst) 3490 v.AuxInt = 0 3491 return true 3492 } 3493 // match: (LessThanU (FlagLT_ULT)) 3494 // cond: 3495 // result: (MOVDconst [1]) 3496 for { 3497 v_0 := v.Args[0] 3498 if v_0.Op != OpARM64FlagLT_ULT { 3499 break 3500 } 3501 v.reset(OpARM64MOVDconst) 3502 v.AuxInt = 1 3503 return true 3504 } 3505 // match: (LessThanU (FlagLT_UGT)) 3506 // cond: 3507 // result: (MOVDconst [0]) 3508 for { 3509 v_0 := v.Args[0] 3510 if v_0.Op != OpARM64FlagLT_UGT { 3511 break 3512 } 3513 v.reset(OpARM64MOVDconst) 3514 v.AuxInt = 0 3515 return true 3516 } 3517 // match: (LessThanU (FlagGT_ULT)) 3518 // cond: 3519 // result: (MOVDconst [1]) 3520 for { 3521 v_0 := v.Args[0] 3522 if v_0.Op != OpARM64FlagGT_ULT { 3523 break 3524 } 3525 v.reset(OpARM64MOVDconst) 3526 v.AuxInt = 1 3527 return true 3528 } 3529 // match: (LessThanU (FlagGT_UGT)) 3530 // cond: 3531 // result: (MOVDconst [0]) 3532 for { 3533 v_0 := v.Args[0] 3534 if v_0.Op != OpARM64FlagGT_UGT { 3535 break 3536 } 3537 v.reset(OpARM64MOVDconst) 3538 v.AuxInt = 0 3539 return true 3540 } 3541 // match: (LessThanU (InvertFlags x)) 3542 // cond: 3543 // result: (GreaterThanU x) 3544 for { 3545 v_0 := v.Args[0] 3546 if v_0.Op != OpARM64InvertFlags { 3547 break 3548 } 3549 x := v_0.Args[0] 3550 v.reset(OpARM64GreaterThanU) 3551 v.AddArg(x) 3552 return true 3553 } 3554 return false 3555 } 3556 func rewriteValueARM64_OpARM64MOD(v *Value, config *Config) bool { 3557 b := v.Block 3558 _ = b 3559 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 3560 // cond: 3561 // result: (MOVDconst [int64(c)%int64(d)]) 3562 for { 3563 v_0 := v.Args[0] 3564 if v_0.Op != OpARM64MOVDconst { 3565 break 3566 } 3567 c := v_0.AuxInt 3568 v_1 := v.Args[1] 3569 if v_1.Op != OpARM64MOVDconst { 3570 break 3571 } 3572 d := v_1.AuxInt 3573 v.reset(OpARM64MOVDconst) 3574 v.AuxInt = int64(c) % int64(d) 3575 return true 3576 } 3577 return false 3578 } 3579 func rewriteValueARM64_OpARM64MODW(v *Value, config *Config) bool { 3580 b := v.Block 3581 _ = b 3582 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 3583 // cond: 3584 // result: (MOVDconst [int64(int32(c)%int32(d))]) 3585 for { 3586 v_0 := v.Args[0] 3587 if v_0.Op != OpARM64MOVDconst { 3588 break 3589 } 3590 c := v_0.AuxInt 3591 v_1 := v.Args[1] 3592 if v_1.Op != OpARM64MOVDconst { 3593 break 3594 } 3595 d := v_1.AuxInt 3596 v.reset(OpARM64MOVDconst) 3597 v.AuxInt = int64(int32(c) % int32(d)) 3598 return true 3599 } 3600 return false 3601 } 3602 func rewriteValueARM64_OpARM64MOVBUload(v *Value, config *Config) bool { 3603 b := v.Block 3604 _ = b 3605 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 3606 // cond: is32Bit(off1+off2) 3607 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3608 for { 3609 off1 := v.AuxInt 3610 sym := v.Aux 3611 v_0 := v.Args[0] 3612 if v_0.Op != OpARM64ADDconst { 3613 break 3614 } 3615 off2 := v_0.AuxInt 3616 ptr := v_0.Args[0] 3617 mem := v.Args[1] 3618 if !(is32Bit(off1 + off2)) { 3619 break 3620 } 3621 v.reset(OpARM64MOVBUload) 3622 v.AuxInt = off1 + off2 3623 v.Aux = sym 3624 v.AddArg(ptr) 3625 v.AddArg(mem) 3626 return true 3627 } 3628 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3629 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3630 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3631 for { 3632 off1 := v.AuxInt 3633 sym1 := v.Aux 3634 v_0 := v.Args[0] 3635 if v_0.Op != OpARM64MOVDaddr { 3636 break 3637 } 3638 off2 := v_0.AuxInt 3639 sym2 := v_0.Aux 3640 ptr := v_0.Args[0] 3641 mem := v.Args[1] 3642 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3643 break 3644 } 3645 v.reset(OpARM64MOVBUload) 3646 v.AuxInt = off1 + off2 3647 v.Aux = mergeSym(sym1, sym2) 3648 v.AddArg(ptr) 3649 v.AddArg(mem) 3650 return true 3651 } 3652 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3653 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3654 // result: (MOVDconst [0]) 3655 for { 3656 off := v.AuxInt 3657 sym := v.Aux 3658 ptr := v.Args[0] 3659 v_1 := v.Args[1] 3660 if v_1.Op != OpARM64MOVBstorezero { 3661 break 3662 } 3663 off2 := v_1.AuxInt 3664 sym2 := v_1.Aux 3665 ptr2 := v_1.Args[0] 3666 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3667 break 3668 } 3669 v.reset(OpARM64MOVDconst) 3670 v.AuxInt = 0 3671 return true 3672 } 3673 return false 3674 } 3675 func rewriteValueARM64_OpARM64MOVBUreg(v *Value, config *Config) bool { 3676 b := v.Block 3677 _ = b 3678 // match: (MOVBUreg x:(MOVBUload _ _)) 3679 // cond: 3680 // result: (MOVDreg x) 3681 for { 3682 x := v.Args[0] 3683 if x.Op != OpARM64MOVBUload { 3684 break 3685 } 3686 v.reset(OpARM64MOVDreg) 3687 v.AddArg(x) 3688 return true 3689 } 3690 // match: (MOVBUreg x:(MOVBUreg _)) 3691 // cond: 3692 // result: (MOVDreg x) 3693 for { 3694 x := v.Args[0] 3695 if x.Op != OpARM64MOVBUreg { 3696 break 3697 } 3698 v.reset(OpARM64MOVDreg) 3699 v.AddArg(x) 3700 return true 3701 } 3702 // match: (MOVBUreg (MOVDconst [c])) 3703 // cond: 3704 // result: (MOVDconst [int64(uint8(c))]) 3705 for { 3706 v_0 := v.Args[0] 3707 if v_0.Op != OpARM64MOVDconst { 3708 break 3709 } 3710 c := v_0.AuxInt 3711 v.reset(OpARM64MOVDconst) 3712 v.AuxInt = int64(uint8(c)) 3713 return true 3714 } 3715 return false 3716 } 3717 func rewriteValueARM64_OpARM64MOVBload(v *Value, config *Config) bool { 3718 b := v.Block 3719 _ = b 3720 // match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem) 3721 // cond: is32Bit(off1+off2) 3722 // result: (MOVBload [off1+off2] {sym} ptr mem) 3723 for { 3724 off1 := v.AuxInt 3725 sym := v.Aux 3726 v_0 := v.Args[0] 3727 if v_0.Op != OpARM64ADDconst { 3728 break 3729 } 3730 off2 := v_0.AuxInt 3731 ptr := v_0.Args[0] 3732 mem := v.Args[1] 3733 if !(is32Bit(off1 + off2)) { 3734 break 3735 } 3736 v.reset(OpARM64MOVBload) 3737 v.AuxInt = off1 + off2 3738 v.Aux = sym 3739 v.AddArg(ptr) 3740 v.AddArg(mem) 3741 return true 3742 } 3743 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3744 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3745 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3746 for { 3747 off1 := v.AuxInt 3748 sym1 := v.Aux 3749 v_0 := v.Args[0] 3750 if v_0.Op != OpARM64MOVDaddr { 3751 break 3752 } 3753 off2 := v_0.AuxInt 3754 sym2 := v_0.Aux 3755 ptr := v_0.Args[0] 3756 mem := v.Args[1] 3757 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3758 break 3759 } 3760 v.reset(OpARM64MOVBload) 3761 v.AuxInt = off1 + off2 3762 v.Aux = mergeSym(sym1, sym2) 3763 v.AddArg(ptr) 3764 v.AddArg(mem) 3765 return true 3766 } 3767 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3768 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3769 // result: (MOVDconst [0]) 3770 for { 3771 off := v.AuxInt 3772 sym := v.Aux 3773 ptr := v.Args[0] 3774 v_1 := v.Args[1] 3775 if v_1.Op != OpARM64MOVBstorezero { 3776 break 3777 } 3778 off2 := v_1.AuxInt 3779 sym2 := v_1.Aux 3780 ptr2 := v_1.Args[0] 3781 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3782 break 3783 } 3784 v.reset(OpARM64MOVDconst) 3785 v.AuxInt = 0 3786 return true 3787 } 3788 return false 3789 } 3790 func rewriteValueARM64_OpARM64MOVBreg(v *Value, config *Config) bool { 3791 b := v.Block 3792 _ = b 3793 // match: (MOVBreg x:(MOVBload _ _)) 3794 // cond: 3795 // result: (MOVDreg x) 3796 for { 3797 x := v.Args[0] 3798 if x.Op != OpARM64MOVBload { 3799 break 3800 } 3801 v.reset(OpARM64MOVDreg) 3802 v.AddArg(x) 3803 return true 3804 } 3805 // match: (MOVBreg x:(MOVBreg _)) 3806 // cond: 3807 // result: (MOVDreg x) 3808 for { 3809 x := v.Args[0] 3810 if x.Op != OpARM64MOVBreg { 3811 break 3812 } 3813 v.reset(OpARM64MOVDreg) 3814 v.AddArg(x) 3815 return true 3816 } 3817 // match: (MOVBreg (MOVDconst [c])) 3818 // cond: 3819 // result: (MOVDconst [int64(int8(c))]) 3820 for { 3821 v_0 := v.Args[0] 3822 if v_0.Op != OpARM64MOVDconst { 3823 break 3824 } 3825 c := v_0.AuxInt 3826 v.reset(OpARM64MOVDconst) 3827 v.AuxInt = int64(int8(c)) 3828 return true 3829 } 3830 return false 3831 } 3832 func rewriteValueARM64_OpARM64MOVBstore(v *Value, config *Config) bool { 3833 b := v.Block 3834 _ = b 3835 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3836 // cond: is32Bit(off1+off2) 3837 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3838 for { 3839 off1 := v.AuxInt 3840 sym := v.Aux 3841 v_0 := v.Args[0] 3842 if v_0.Op != OpARM64ADDconst { 3843 break 3844 } 3845 off2 := v_0.AuxInt 3846 ptr := v_0.Args[0] 3847 val := v.Args[1] 3848 mem := v.Args[2] 3849 if !(is32Bit(off1 + off2)) { 3850 break 3851 } 3852 v.reset(OpARM64MOVBstore) 3853 v.AuxInt = off1 + off2 3854 v.Aux = sym 3855 v.AddArg(ptr) 3856 v.AddArg(val) 3857 v.AddArg(mem) 3858 return true 3859 } 3860 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3861 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3862 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3863 for { 3864 off1 := v.AuxInt 3865 sym1 := v.Aux 3866 v_0 := v.Args[0] 3867 if v_0.Op != OpARM64MOVDaddr { 3868 break 3869 } 3870 off2 := v_0.AuxInt 3871 sym2 := v_0.Aux 3872 ptr := v_0.Args[0] 3873 val := v.Args[1] 3874 mem := v.Args[2] 3875 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3876 break 3877 } 3878 v.reset(OpARM64MOVBstore) 3879 v.AuxInt = off1 + off2 3880 v.Aux = mergeSym(sym1, sym2) 3881 v.AddArg(ptr) 3882 v.AddArg(val) 3883 v.AddArg(mem) 3884 return true 3885 } 3886 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 3887 // cond: 3888 // result: (MOVBstorezero [off] {sym} ptr mem) 3889 for { 3890 off := v.AuxInt 3891 sym := v.Aux 3892 ptr := v.Args[0] 3893 v_1 := v.Args[1] 3894 if v_1.Op != OpARM64MOVDconst { 3895 break 3896 } 3897 if v_1.AuxInt != 0 { 3898 break 3899 } 3900 mem := v.Args[2] 3901 v.reset(OpARM64MOVBstorezero) 3902 v.AuxInt = off 3903 v.Aux = sym 3904 v.AddArg(ptr) 3905 v.AddArg(mem) 3906 return true 3907 } 3908 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 3909 // cond: 3910 // result: (MOVBstore [off] {sym} ptr x mem) 3911 for { 3912 off := v.AuxInt 3913 sym := v.Aux 3914 ptr := v.Args[0] 3915 v_1 := v.Args[1] 3916 if v_1.Op != OpARM64MOVBreg { 3917 break 3918 } 3919 x := v_1.Args[0] 3920 mem := v.Args[2] 3921 v.reset(OpARM64MOVBstore) 3922 v.AuxInt = off 3923 v.Aux = sym 3924 v.AddArg(ptr) 3925 v.AddArg(x) 3926 v.AddArg(mem) 3927 return true 3928 } 3929 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 3930 // cond: 3931 // result: (MOVBstore [off] {sym} ptr x mem) 3932 for { 3933 off := v.AuxInt 3934 sym := v.Aux 3935 ptr := v.Args[0] 3936 v_1 := v.Args[1] 3937 if v_1.Op != OpARM64MOVBUreg { 3938 break 3939 } 3940 x := v_1.Args[0] 3941 mem := v.Args[2] 3942 v.reset(OpARM64MOVBstore) 3943 v.AuxInt = off 3944 v.Aux = sym 3945 v.AddArg(ptr) 3946 v.AddArg(x) 3947 v.AddArg(mem) 3948 return true 3949 } 3950 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 3951 // cond: 3952 // result: (MOVBstore [off] {sym} ptr x mem) 3953 for { 3954 off := v.AuxInt 3955 sym := v.Aux 3956 ptr := v.Args[0] 3957 v_1 := v.Args[1] 3958 if v_1.Op != OpARM64MOVHreg { 3959 break 3960 } 3961 x := v_1.Args[0] 3962 mem := v.Args[2] 3963 v.reset(OpARM64MOVBstore) 3964 v.AuxInt = off 3965 v.Aux = sym 3966 v.AddArg(ptr) 3967 v.AddArg(x) 3968 v.AddArg(mem) 3969 return true 3970 } 3971 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 3972 // cond: 3973 // result: (MOVBstore [off] {sym} ptr x mem) 3974 for { 3975 off := v.AuxInt 3976 sym := v.Aux 3977 ptr := v.Args[0] 3978 v_1 := v.Args[1] 3979 if v_1.Op != OpARM64MOVHUreg { 3980 break 3981 } 3982 x := v_1.Args[0] 3983 mem := v.Args[2] 3984 v.reset(OpARM64MOVBstore) 3985 v.AuxInt = off 3986 v.Aux = sym 3987 v.AddArg(ptr) 3988 v.AddArg(x) 3989 v.AddArg(mem) 3990 return true 3991 } 3992 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 3993 // cond: 3994 // result: (MOVBstore [off] {sym} ptr x mem) 3995 for { 3996 off := v.AuxInt 3997 sym := v.Aux 3998 ptr := v.Args[0] 3999 v_1 := v.Args[1] 4000 if v_1.Op != OpARM64MOVWreg { 4001 break 4002 } 4003 x := v_1.Args[0] 4004 mem := v.Args[2] 4005 v.reset(OpARM64MOVBstore) 4006 v.AuxInt = off 4007 v.Aux = sym 4008 v.AddArg(ptr) 4009 v.AddArg(x) 4010 v.AddArg(mem) 4011 return true 4012 } 4013 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 4014 // cond: 4015 // result: (MOVBstore [off] {sym} ptr x mem) 4016 for { 4017 off := v.AuxInt 4018 sym := v.Aux 4019 ptr := v.Args[0] 4020 v_1 := v.Args[1] 4021 if v_1.Op != OpARM64MOVWUreg { 4022 break 4023 } 4024 x := v_1.Args[0] 4025 mem := v.Args[2] 4026 v.reset(OpARM64MOVBstore) 4027 v.AuxInt = off 4028 v.Aux = sym 4029 v.AddArg(ptr) 4030 v.AddArg(x) 4031 v.AddArg(mem) 4032 return true 4033 } 4034 return false 4035 } 4036 func rewriteValueARM64_OpARM64MOVBstorezero(v *Value, config *Config) bool { 4037 b := v.Block 4038 _ = b 4039 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4040 // cond: is32Bit(off1+off2) 4041 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 4042 for { 4043 off1 := v.AuxInt 4044 sym := v.Aux 4045 v_0 := v.Args[0] 4046 if v_0.Op != OpARM64ADDconst { 4047 break 4048 } 4049 off2 := v_0.AuxInt 4050 ptr := v_0.Args[0] 4051 mem := v.Args[1] 4052 if !(is32Bit(off1 + off2)) { 4053 break 4054 } 4055 v.reset(OpARM64MOVBstorezero) 4056 v.AuxInt = off1 + off2 4057 v.Aux = sym 4058 v.AddArg(ptr) 4059 v.AddArg(mem) 4060 return true 4061 } 4062 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4063 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4064 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4065 for { 4066 off1 := v.AuxInt 4067 sym1 := v.Aux 4068 v_0 := v.Args[0] 4069 if v_0.Op != OpARM64MOVDaddr { 4070 break 4071 } 4072 off2 := v_0.AuxInt 4073 sym2 := v_0.Aux 4074 ptr := v_0.Args[0] 4075 mem := v.Args[1] 4076 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4077 break 4078 } 4079 v.reset(OpARM64MOVBstorezero) 4080 v.AuxInt = off1 + off2 4081 v.Aux = mergeSym(sym1, sym2) 4082 v.AddArg(ptr) 4083 v.AddArg(mem) 4084 return true 4085 } 4086 return false 4087 } 4088 func rewriteValueARM64_OpARM64MOVDload(v *Value, config *Config) bool { 4089 b := v.Block 4090 _ = b 4091 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 4092 // cond: is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4093 // result: (MOVDload [off1+off2] {sym} ptr mem) 4094 for { 4095 off1 := v.AuxInt 4096 sym := v.Aux 4097 v_0 := v.Args[0] 4098 if v_0.Op != OpARM64ADDconst { 4099 break 4100 } 4101 off2 := v_0.AuxInt 4102 ptr := v_0.Args[0] 4103 mem := v.Args[1] 4104 if !(is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4105 break 4106 } 4107 v.reset(OpARM64MOVDload) 4108 v.AuxInt = off1 + off2 4109 v.Aux = sym 4110 v.AddArg(ptr) 4111 v.AddArg(mem) 4112 return true 4113 } 4114 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4115 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4116 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4117 for { 4118 off1 := v.AuxInt 4119 sym1 := v.Aux 4120 v_0 := v.Args[0] 4121 if v_0.Op != OpARM64MOVDaddr { 4122 break 4123 } 4124 off2 := v_0.AuxInt 4125 sym2 := v_0.Aux 4126 ptr := v_0.Args[0] 4127 mem := v.Args[1] 4128 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4129 break 4130 } 4131 v.reset(OpARM64MOVDload) 4132 v.AuxInt = off1 + off2 4133 v.Aux = mergeSym(sym1, sym2) 4134 v.AddArg(ptr) 4135 v.AddArg(mem) 4136 return true 4137 } 4138 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 4139 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4140 // result: (MOVDconst [0]) 4141 for { 4142 off := v.AuxInt 4143 sym := v.Aux 4144 ptr := v.Args[0] 4145 v_1 := v.Args[1] 4146 if v_1.Op != OpARM64MOVDstorezero { 4147 break 4148 } 4149 off2 := v_1.AuxInt 4150 sym2 := v_1.Aux 4151 ptr2 := v_1.Args[0] 4152 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4153 break 4154 } 4155 v.reset(OpARM64MOVDconst) 4156 v.AuxInt = 0 4157 return true 4158 } 4159 return false 4160 } 4161 func rewriteValueARM64_OpARM64MOVDreg(v *Value, config *Config) bool { 4162 b := v.Block 4163 _ = b 4164 // match: (MOVDreg x) 4165 // cond: x.Uses == 1 4166 // result: (MOVDnop x) 4167 for { 4168 x := v.Args[0] 4169 if !(x.Uses == 1) { 4170 break 4171 } 4172 v.reset(OpARM64MOVDnop) 4173 v.AddArg(x) 4174 return true 4175 } 4176 // match: (MOVDreg (MOVDconst [c])) 4177 // cond: 4178 // result: (MOVDconst [c]) 4179 for { 4180 v_0 := v.Args[0] 4181 if v_0.Op != OpARM64MOVDconst { 4182 break 4183 } 4184 c := v_0.AuxInt 4185 v.reset(OpARM64MOVDconst) 4186 v.AuxInt = c 4187 return true 4188 } 4189 return false 4190 } 4191 func rewriteValueARM64_OpARM64MOVDstore(v *Value, config *Config) bool { 4192 b := v.Block 4193 _ = b 4194 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4195 // cond: is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4196 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4197 for { 4198 off1 := v.AuxInt 4199 sym := v.Aux 4200 v_0 := v.Args[0] 4201 if v_0.Op != OpARM64ADDconst { 4202 break 4203 } 4204 off2 := v_0.AuxInt 4205 ptr := v_0.Args[0] 4206 val := v.Args[1] 4207 mem := v.Args[2] 4208 if !(is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4209 break 4210 } 4211 v.reset(OpARM64MOVDstore) 4212 v.AuxInt = off1 + off2 4213 v.Aux = sym 4214 v.AddArg(ptr) 4215 v.AddArg(val) 4216 v.AddArg(mem) 4217 return true 4218 } 4219 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4220 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4221 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4222 for { 4223 off1 := v.AuxInt 4224 sym1 := v.Aux 4225 v_0 := v.Args[0] 4226 if v_0.Op != OpARM64MOVDaddr { 4227 break 4228 } 4229 off2 := v_0.AuxInt 4230 sym2 := v_0.Aux 4231 ptr := v_0.Args[0] 4232 val := v.Args[1] 4233 mem := v.Args[2] 4234 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4235 break 4236 } 4237 v.reset(OpARM64MOVDstore) 4238 v.AuxInt = off1 + off2 4239 v.Aux = mergeSym(sym1, sym2) 4240 v.AddArg(ptr) 4241 v.AddArg(val) 4242 v.AddArg(mem) 4243 return true 4244 } 4245 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 4246 // cond: 4247 // result: (MOVDstorezero [off] {sym} ptr mem) 4248 for { 4249 off := v.AuxInt 4250 sym := v.Aux 4251 ptr := v.Args[0] 4252 v_1 := v.Args[1] 4253 if v_1.Op != OpARM64MOVDconst { 4254 break 4255 } 4256 if v_1.AuxInt != 0 { 4257 break 4258 } 4259 mem := v.Args[2] 4260 v.reset(OpARM64MOVDstorezero) 4261 v.AuxInt = off 4262 v.Aux = sym 4263 v.AddArg(ptr) 4264 v.AddArg(mem) 4265 return true 4266 } 4267 return false 4268 } 4269 func rewriteValueARM64_OpARM64MOVDstorezero(v *Value, config *Config) bool { 4270 b := v.Block 4271 _ = b 4272 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4273 // cond: is32Bit(off1+off2) && ((off1+off2)%2==8 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4274 // result: (MOVDstorezero [off1+off2] {sym} ptr mem) 4275 for { 4276 off1 := v.AuxInt 4277 sym := v.Aux 4278 v_0 := v.Args[0] 4279 if v_0.Op != OpARM64ADDconst { 4280 break 4281 } 4282 off2 := v_0.AuxInt 4283 ptr := v_0.Args[0] 4284 mem := v.Args[1] 4285 if !(is32Bit(off1+off2) && ((off1+off2)%2 == 8 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4286 break 4287 } 4288 v.reset(OpARM64MOVDstorezero) 4289 v.AuxInt = off1 + off2 4290 v.Aux = sym 4291 v.AddArg(ptr) 4292 v.AddArg(mem) 4293 return true 4294 } 4295 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4296 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4297 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4298 for { 4299 off1 := v.AuxInt 4300 sym1 := v.Aux 4301 v_0 := v.Args[0] 4302 if v_0.Op != OpARM64MOVDaddr { 4303 break 4304 } 4305 off2 := v_0.AuxInt 4306 sym2 := v_0.Aux 4307 ptr := v_0.Args[0] 4308 mem := v.Args[1] 4309 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4310 break 4311 } 4312 v.reset(OpARM64MOVDstorezero) 4313 v.AuxInt = off1 + off2 4314 v.Aux = mergeSym(sym1, sym2) 4315 v.AddArg(ptr) 4316 v.AddArg(mem) 4317 return true 4318 } 4319 return false 4320 } 4321 func rewriteValueARM64_OpARM64MOVHUload(v *Value, config *Config) bool { 4322 b := v.Block 4323 _ = b 4324 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 4325 // cond: is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4326 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4327 for { 4328 off1 := v.AuxInt 4329 sym := v.Aux 4330 v_0 := v.Args[0] 4331 if v_0.Op != OpARM64ADDconst { 4332 break 4333 } 4334 off2 := v_0.AuxInt 4335 ptr := v_0.Args[0] 4336 mem := v.Args[1] 4337 if !(is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4338 break 4339 } 4340 v.reset(OpARM64MOVHUload) 4341 v.AuxInt = off1 + off2 4342 v.Aux = sym 4343 v.AddArg(ptr) 4344 v.AddArg(mem) 4345 return true 4346 } 4347 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4348 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4349 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4350 for { 4351 off1 := v.AuxInt 4352 sym1 := v.Aux 4353 v_0 := v.Args[0] 4354 if v_0.Op != OpARM64MOVDaddr { 4355 break 4356 } 4357 off2 := v_0.AuxInt 4358 sym2 := v_0.Aux 4359 ptr := v_0.Args[0] 4360 mem := v.Args[1] 4361 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4362 break 4363 } 4364 v.reset(OpARM64MOVHUload) 4365 v.AuxInt = off1 + off2 4366 v.Aux = mergeSym(sym1, sym2) 4367 v.AddArg(ptr) 4368 v.AddArg(mem) 4369 return true 4370 } 4371 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4372 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4373 // result: (MOVDconst [0]) 4374 for { 4375 off := v.AuxInt 4376 sym := v.Aux 4377 ptr := v.Args[0] 4378 v_1 := v.Args[1] 4379 if v_1.Op != OpARM64MOVHstorezero { 4380 break 4381 } 4382 off2 := v_1.AuxInt 4383 sym2 := v_1.Aux 4384 ptr2 := v_1.Args[0] 4385 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4386 break 4387 } 4388 v.reset(OpARM64MOVDconst) 4389 v.AuxInt = 0 4390 return true 4391 } 4392 return false 4393 } 4394 func rewriteValueARM64_OpARM64MOVHUreg(v *Value, config *Config) bool { 4395 b := v.Block 4396 _ = b 4397 // match: (MOVHUreg x:(MOVBUload _ _)) 4398 // cond: 4399 // result: (MOVDreg x) 4400 for { 4401 x := v.Args[0] 4402 if x.Op != OpARM64MOVBUload { 4403 break 4404 } 4405 v.reset(OpARM64MOVDreg) 4406 v.AddArg(x) 4407 return true 4408 } 4409 // match: (MOVHUreg x:(MOVHUload _ _)) 4410 // cond: 4411 // result: (MOVDreg x) 4412 for { 4413 x := v.Args[0] 4414 if x.Op != OpARM64MOVHUload { 4415 break 4416 } 4417 v.reset(OpARM64MOVDreg) 4418 v.AddArg(x) 4419 return true 4420 } 4421 // match: (MOVHUreg x:(MOVBUreg _)) 4422 // cond: 4423 // result: (MOVDreg x) 4424 for { 4425 x := v.Args[0] 4426 if x.Op != OpARM64MOVBUreg { 4427 break 4428 } 4429 v.reset(OpARM64MOVDreg) 4430 v.AddArg(x) 4431 return true 4432 } 4433 // match: (MOVHUreg x:(MOVHUreg _)) 4434 // cond: 4435 // result: (MOVDreg x) 4436 for { 4437 x := v.Args[0] 4438 if x.Op != OpARM64MOVHUreg { 4439 break 4440 } 4441 v.reset(OpARM64MOVDreg) 4442 v.AddArg(x) 4443 return true 4444 } 4445 // match: (MOVHUreg (MOVDconst [c])) 4446 // cond: 4447 // result: (MOVDconst [int64(uint16(c))]) 4448 for { 4449 v_0 := v.Args[0] 4450 if v_0.Op != OpARM64MOVDconst { 4451 break 4452 } 4453 c := v_0.AuxInt 4454 v.reset(OpARM64MOVDconst) 4455 v.AuxInt = int64(uint16(c)) 4456 return true 4457 } 4458 return false 4459 } 4460 func rewriteValueARM64_OpARM64MOVHload(v *Value, config *Config) bool { 4461 b := v.Block 4462 _ = b 4463 // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem) 4464 // cond: is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4465 // result: (MOVHload [off1+off2] {sym} ptr mem) 4466 for { 4467 off1 := v.AuxInt 4468 sym := v.Aux 4469 v_0 := v.Args[0] 4470 if v_0.Op != OpARM64ADDconst { 4471 break 4472 } 4473 off2 := v_0.AuxInt 4474 ptr := v_0.Args[0] 4475 mem := v.Args[1] 4476 if !(is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4477 break 4478 } 4479 v.reset(OpARM64MOVHload) 4480 v.AuxInt = off1 + off2 4481 v.Aux = sym 4482 v.AddArg(ptr) 4483 v.AddArg(mem) 4484 return true 4485 } 4486 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4487 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4488 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4489 for { 4490 off1 := v.AuxInt 4491 sym1 := v.Aux 4492 v_0 := v.Args[0] 4493 if v_0.Op != OpARM64MOVDaddr { 4494 break 4495 } 4496 off2 := v_0.AuxInt 4497 sym2 := v_0.Aux 4498 ptr := v_0.Args[0] 4499 mem := v.Args[1] 4500 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4501 break 4502 } 4503 v.reset(OpARM64MOVHload) 4504 v.AuxInt = off1 + off2 4505 v.Aux = mergeSym(sym1, sym2) 4506 v.AddArg(ptr) 4507 v.AddArg(mem) 4508 return true 4509 } 4510 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4511 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4512 // result: (MOVDconst [0]) 4513 for { 4514 off := v.AuxInt 4515 sym := v.Aux 4516 ptr := v.Args[0] 4517 v_1 := v.Args[1] 4518 if v_1.Op != OpARM64MOVHstorezero { 4519 break 4520 } 4521 off2 := v_1.AuxInt 4522 sym2 := v_1.Aux 4523 ptr2 := v_1.Args[0] 4524 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4525 break 4526 } 4527 v.reset(OpARM64MOVDconst) 4528 v.AuxInt = 0 4529 return true 4530 } 4531 return false 4532 } 4533 func rewriteValueARM64_OpARM64MOVHreg(v *Value, config *Config) bool { 4534 b := v.Block 4535 _ = b 4536 // match: (MOVHreg x:(MOVBload _ _)) 4537 // cond: 4538 // result: (MOVDreg x) 4539 for { 4540 x := v.Args[0] 4541 if x.Op != OpARM64MOVBload { 4542 break 4543 } 4544 v.reset(OpARM64MOVDreg) 4545 v.AddArg(x) 4546 return true 4547 } 4548 // match: (MOVHreg x:(MOVBUload _ _)) 4549 // cond: 4550 // result: (MOVDreg x) 4551 for { 4552 x := v.Args[0] 4553 if x.Op != OpARM64MOVBUload { 4554 break 4555 } 4556 v.reset(OpARM64MOVDreg) 4557 v.AddArg(x) 4558 return true 4559 } 4560 // match: (MOVHreg x:(MOVHload _ _)) 4561 // cond: 4562 // result: (MOVDreg x) 4563 for { 4564 x := v.Args[0] 4565 if x.Op != OpARM64MOVHload { 4566 break 4567 } 4568 v.reset(OpARM64MOVDreg) 4569 v.AddArg(x) 4570 return true 4571 } 4572 // match: (MOVHreg x:(MOVBreg _)) 4573 // cond: 4574 // result: (MOVDreg x) 4575 for { 4576 x := v.Args[0] 4577 if x.Op != OpARM64MOVBreg { 4578 break 4579 } 4580 v.reset(OpARM64MOVDreg) 4581 v.AddArg(x) 4582 return true 4583 } 4584 // match: (MOVHreg x:(MOVBUreg _)) 4585 // cond: 4586 // result: (MOVDreg x) 4587 for { 4588 x := v.Args[0] 4589 if x.Op != OpARM64MOVBUreg { 4590 break 4591 } 4592 v.reset(OpARM64MOVDreg) 4593 v.AddArg(x) 4594 return true 4595 } 4596 // match: (MOVHreg x:(MOVHreg _)) 4597 // cond: 4598 // result: (MOVDreg x) 4599 for { 4600 x := v.Args[0] 4601 if x.Op != OpARM64MOVHreg { 4602 break 4603 } 4604 v.reset(OpARM64MOVDreg) 4605 v.AddArg(x) 4606 return true 4607 } 4608 // match: (MOVHreg (MOVDconst [c])) 4609 // cond: 4610 // result: (MOVDconst [int64(int16(c))]) 4611 for { 4612 v_0 := v.Args[0] 4613 if v_0.Op != OpARM64MOVDconst { 4614 break 4615 } 4616 c := v_0.AuxInt 4617 v.reset(OpARM64MOVDconst) 4618 v.AuxInt = int64(int16(c)) 4619 return true 4620 } 4621 return false 4622 } 4623 func rewriteValueARM64_OpARM64MOVHstore(v *Value, config *Config) bool { 4624 b := v.Block 4625 _ = b 4626 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4627 // cond: is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4628 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4629 for { 4630 off1 := v.AuxInt 4631 sym := v.Aux 4632 v_0 := v.Args[0] 4633 if v_0.Op != OpARM64ADDconst { 4634 break 4635 } 4636 off2 := v_0.AuxInt 4637 ptr := v_0.Args[0] 4638 val := v.Args[1] 4639 mem := v.Args[2] 4640 if !(is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4641 break 4642 } 4643 v.reset(OpARM64MOVHstore) 4644 v.AuxInt = off1 + off2 4645 v.Aux = sym 4646 v.AddArg(ptr) 4647 v.AddArg(val) 4648 v.AddArg(mem) 4649 return true 4650 } 4651 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4652 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4653 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4654 for { 4655 off1 := v.AuxInt 4656 sym1 := v.Aux 4657 v_0 := v.Args[0] 4658 if v_0.Op != OpARM64MOVDaddr { 4659 break 4660 } 4661 off2 := v_0.AuxInt 4662 sym2 := v_0.Aux 4663 ptr := v_0.Args[0] 4664 val := v.Args[1] 4665 mem := v.Args[2] 4666 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4667 break 4668 } 4669 v.reset(OpARM64MOVHstore) 4670 v.AuxInt = off1 + off2 4671 v.Aux = mergeSym(sym1, sym2) 4672 v.AddArg(ptr) 4673 v.AddArg(val) 4674 v.AddArg(mem) 4675 return true 4676 } 4677 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 4678 // cond: 4679 // result: (MOVHstorezero [off] {sym} ptr mem) 4680 for { 4681 off := v.AuxInt 4682 sym := v.Aux 4683 ptr := v.Args[0] 4684 v_1 := v.Args[1] 4685 if v_1.Op != OpARM64MOVDconst { 4686 break 4687 } 4688 if v_1.AuxInt != 0 { 4689 break 4690 } 4691 mem := v.Args[2] 4692 v.reset(OpARM64MOVHstorezero) 4693 v.AuxInt = off 4694 v.Aux = sym 4695 v.AddArg(ptr) 4696 v.AddArg(mem) 4697 return true 4698 } 4699 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4700 // cond: 4701 // result: (MOVHstore [off] {sym} ptr x mem) 4702 for { 4703 off := v.AuxInt 4704 sym := v.Aux 4705 ptr := v.Args[0] 4706 v_1 := v.Args[1] 4707 if v_1.Op != OpARM64MOVHreg { 4708 break 4709 } 4710 x := v_1.Args[0] 4711 mem := v.Args[2] 4712 v.reset(OpARM64MOVHstore) 4713 v.AuxInt = off 4714 v.Aux = sym 4715 v.AddArg(ptr) 4716 v.AddArg(x) 4717 v.AddArg(mem) 4718 return true 4719 } 4720 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4721 // cond: 4722 // result: (MOVHstore [off] {sym} ptr x mem) 4723 for { 4724 off := v.AuxInt 4725 sym := v.Aux 4726 ptr := v.Args[0] 4727 v_1 := v.Args[1] 4728 if v_1.Op != OpARM64MOVHUreg { 4729 break 4730 } 4731 x := v_1.Args[0] 4732 mem := v.Args[2] 4733 v.reset(OpARM64MOVHstore) 4734 v.AuxInt = off 4735 v.Aux = sym 4736 v.AddArg(ptr) 4737 v.AddArg(x) 4738 v.AddArg(mem) 4739 return true 4740 } 4741 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4742 // cond: 4743 // result: (MOVHstore [off] {sym} ptr x mem) 4744 for { 4745 off := v.AuxInt 4746 sym := v.Aux 4747 ptr := v.Args[0] 4748 v_1 := v.Args[1] 4749 if v_1.Op != OpARM64MOVWreg { 4750 break 4751 } 4752 x := v_1.Args[0] 4753 mem := v.Args[2] 4754 v.reset(OpARM64MOVHstore) 4755 v.AuxInt = off 4756 v.Aux = sym 4757 v.AddArg(ptr) 4758 v.AddArg(x) 4759 v.AddArg(mem) 4760 return true 4761 } 4762 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4763 // cond: 4764 // result: (MOVHstore [off] {sym} ptr x mem) 4765 for { 4766 off := v.AuxInt 4767 sym := v.Aux 4768 ptr := v.Args[0] 4769 v_1 := v.Args[1] 4770 if v_1.Op != OpARM64MOVWUreg { 4771 break 4772 } 4773 x := v_1.Args[0] 4774 mem := v.Args[2] 4775 v.reset(OpARM64MOVHstore) 4776 v.AuxInt = off 4777 v.Aux = sym 4778 v.AddArg(ptr) 4779 v.AddArg(x) 4780 v.AddArg(mem) 4781 return true 4782 } 4783 return false 4784 } 4785 func rewriteValueARM64_OpARM64MOVHstorezero(v *Value, config *Config) bool { 4786 b := v.Block 4787 _ = b 4788 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4789 // cond: is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4790 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4791 for { 4792 off1 := v.AuxInt 4793 sym := v.Aux 4794 v_0 := v.Args[0] 4795 if v_0.Op != OpARM64ADDconst { 4796 break 4797 } 4798 off2 := v_0.AuxInt 4799 ptr := v_0.Args[0] 4800 mem := v.Args[1] 4801 if !(is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4802 break 4803 } 4804 v.reset(OpARM64MOVHstorezero) 4805 v.AuxInt = off1 + off2 4806 v.Aux = sym 4807 v.AddArg(ptr) 4808 v.AddArg(mem) 4809 return true 4810 } 4811 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4812 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4813 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4814 for { 4815 off1 := v.AuxInt 4816 sym1 := v.Aux 4817 v_0 := v.Args[0] 4818 if v_0.Op != OpARM64MOVDaddr { 4819 break 4820 } 4821 off2 := v_0.AuxInt 4822 sym2 := v_0.Aux 4823 ptr := v_0.Args[0] 4824 mem := v.Args[1] 4825 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4826 break 4827 } 4828 v.reset(OpARM64MOVHstorezero) 4829 v.AuxInt = off1 + off2 4830 v.Aux = mergeSym(sym1, sym2) 4831 v.AddArg(ptr) 4832 v.AddArg(mem) 4833 return true 4834 } 4835 return false 4836 } 4837 func rewriteValueARM64_OpARM64MOVWUload(v *Value, config *Config) bool { 4838 b := v.Block 4839 _ = b 4840 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 4841 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 4842 // result: (MOVWUload [off1+off2] {sym} ptr mem) 4843 for { 4844 off1 := v.AuxInt 4845 sym := v.Aux 4846 v_0 := v.Args[0] 4847 if v_0.Op != OpARM64ADDconst { 4848 break 4849 } 4850 off2 := v_0.AuxInt 4851 ptr := v_0.Args[0] 4852 mem := v.Args[1] 4853 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 4854 break 4855 } 4856 v.reset(OpARM64MOVWUload) 4857 v.AuxInt = off1 + off2 4858 v.Aux = sym 4859 v.AddArg(ptr) 4860 v.AddArg(mem) 4861 return true 4862 } 4863 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4864 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 4865 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4866 for { 4867 off1 := v.AuxInt 4868 sym1 := v.Aux 4869 v_0 := v.Args[0] 4870 if v_0.Op != OpARM64MOVDaddr { 4871 break 4872 } 4873 off2 := v_0.AuxInt 4874 sym2 := v_0.Aux 4875 ptr := v_0.Args[0] 4876 mem := v.Args[1] 4877 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 4878 break 4879 } 4880 v.reset(OpARM64MOVWUload) 4881 v.AuxInt = off1 + off2 4882 v.Aux = mergeSym(sym1, sym2) 4883 v.AddArg(ptr) 4884 v.AddArg(mem) 4885 return true 4886 } 4887 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 4888 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4889 // result: (MOVDconst [0]) 4890 for { 4891 off := v.AuxInt 4892 sym := v.Aux 4893 ptr := v.Args[0] 4894 v_1 := v.Args[1] 4895 if v_1.Op != OpARM64MOVWstorezero { 4896 break 4897 } 4898 off2 := v_1.AuxInt 4899 sym2 := v_1.Aux 4900 ptr2 := v_1.Args[0] 4901 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4902 break 4903 } 4904 v.reset(OpARM64MOVDconst) 4905 v.AuxInt = 0 4906 return true 4907 } 4908 return false 4909 } 4910 func rewriteValueARM64_OpARM64MOVWUreg(v *Value, config *Config) bool { 4911 b := v.Block 4912 _ = b 4913 // match: (MOVWUreg x:(MOVBUload _ _)) 4914 // cond: 4915 // result: (MOVDreg x) 4916 for { 4917 x := v.Args[0] 4918 if x.Op != OpARM64MOVBUload { 4919 break 4920 } 4921 v.reset(OpARM64MOVDreg) 4922 v.AddArg(x) 4923 return true 4924 } 4925 // match: (MOVWUreg x:(MOVHUload _ _)) 4926 // cond: 4927 // result: (MOVDreg x) 4928 for { 4929 x := v.Args[0] 4930 if x.Op != OpARM64MOVHUload { 4931 break 4932 } 4933 v.reset(OpARM64MOVDreg) 4934 v.AddArg(x) 4935 return true 4936 } 4937 // match: (MOVWUreg x:(MOVWUload _ _)) 4938 // cond: 4939 // result: (MOVDreg x) 4940 for { 4941 x := v.Args[0] 4942 if x.Op != OpARM64MOVWUload { 4943 break 4944 } 4945 v.reset(OpARM64MOVDreg) 4946 v.AddArg(x) 4947 return true 4948 } 4949 // match: (MOVWUreg x:(MOVBUreg _)) 4950 // cond: 4951 // result: (MOVDreg x) 4952 for { 4953 x := v.Args[0] 4954 if x.Op != OpARM64MOVBUreg { 4955 break 4956 } 4957 v.reset(OpARM64MOVDreg) 4958 v.AddArg(x) 4959 return true 4960 } 4961 // match: (MOVWUreg x:(MOVHUreg _)) 4962 // cond: 4963 // result: (MOVDreg x) 4964 for { 4965 x := v.Args[0] 4966 if x.Op != OpARM64MOVHUreg { 4967 break 4968 } 4969 v.reset(OpARM64MOVDreg) 4970 v.AddArg(x) 4971 return true 4972 } 4973 // match: (MOVWUreg x:(MOVWUreg _)) 4974 // cond: 4975 // result: (MOVDreg x) 4976 for { 4977 x := v.Args[0] 4978 if x.Op != OpARM64MOVWUreg { 4979 break 4980 } 4981 v.reset(OpARM64MOVDreg) 4982 v.AddArg(x) 4983 return true 4984 } 4985 // match: (MOVWUreg (MOVDconst [c])) 4986 // cond: 4987 // result: (MOVDconst [int64(uint32(c))]) 4988 for { 4989 v_0 := v.Args[0] 4990 if v_0.Op != OpARM64MOVDconst { 4991 break 4992 } 4993 c := v_0.AuxInt 4994 v.reset(OpARM64MOVDconst) 4995 v.AuxInt = int64(uint32(c)) 4996 return true 4997 } 4998 return false 4999 } 5000 func rewriteValueARM64_OpARM64MOVWload(v *Value, config *Config) bool { 5001 b := v.Block 5002 _ = b 5003 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 5004 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 5005 // result: (MOVWload [off1+off2] {sym} ptr mem) 5006 for { 5007 off1 := v.AuxInt 5008 sym := v.Aux 5009 v_0 := v.Args[0] 5010 if v_0.Op != OpARM64ADDconst { 5011 break 5012 } 5013 off2 := v_0.AuxInt 5014 ptr := v_0.Args[0] 5015 mem := v.Args[1] 5016 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 5017 break 5018 } 5019 v.reset(OpARM64MOVWload) 5020 v.AuxInt = off1 + off2 5021 v.Aux = sym 5022 v.AddArg(ptr) 5023 v.AddArg(mem) 5024 return true 5025 } 5026 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5027 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 5028 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5029 for { 5030 off1 := v.AuxInt 5031 sym1 := v.Aux 5032 v_0 := v.Args[0] 5033 if v_0.Op != OpARM64MOVDaddr { 5034 break 5035 } 5036 off2 := v_0.AuxInt 5037 sym2 := v_0.Aux 5038 ptr := v_0.Args[0] 5039 mem := v.Args[1] 5040 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 5041 break 5042 } 5043 v.reset(OpARM64MOVWload) 5044 v.AuxInt = off1 + off2 5045 v.Aux = mergeSym(sym1, sym2) 5046 v.AddArg(ptr) 5047 v.AddArg(mem) 5048 return true 5049 } 5050 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 5051 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 5052 // result: (MOVDconst [0]) 5053 for { 5054 off := v.AuxInt 5055 sym := v.Aux 5056 ptr := v.Args[0] 5057 v_1 := v.Args[1] 5058 if v_1.Op != OpARM64MOVWstorezero { 5059 break 5060 } 5061 off2 := v_1.AuxInt 5062 sym2 := v_1.Aux 5063 ptr2 := v_1.Args[0] 5064 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 5065 break 5066 } 5067 v.reset(OpARM64MOVDconst) 5068 v.AuxInt = 0 5069 return true 5070 } 5071 return false 5072 } 5073 func rewriteValueARM64_OpARM64MOVWreg(v *Value, config *Config) bool { 5074 b := v.Block 5075 _ = b 5076 // match: (MOVWreg x:(MOVBload _ _)) 5077 // cond: 5078 // result: (MOVDreg x) 5079 for { 5080 x := v.Args[0] 5081 if x.Op != OpARM64MOVBload { 5082 break 5083 } 5084 v.reset(OpARM64MOVDreg) 5085 v.AddArg(x) 5086 return true 5087 } 5088 // match: (MOVWreg x:(MOVBUload _ _)) 5089 // cond: 5090 // result: (MOVDreg x) 5091 for { 5092 x := v.Args[0] 5093 if x.Op != OpARM64MOVBUload { 5094 break 5095 } 5096 v.reset(OpARM64MOVDreg) 5097 v.AddArg(x) 5098 return true 5099 } 5100 // match: (MOVWreg x:(MOVHload _ _)) 5101 // cond: 5102 // result: (MOVDreg x) 5103 for { 5104 x := v.Args[0] 5105 if x.Op != OpARM64MOVHload { 5106 break 5107 } 5108 v.reset(OpARM64MOVDreg) 5109 v.AddArg(x) 5110 return true 5111 } 5112 // match: (MOVWreg x:(MOVHUload _ _)) 5113 // cond: 5114 // result: (MOVDreg x) 5115 for { 5116 x := v.Args[0] 5117 if x.Op != OpARM64MOVHUload { 5118 break 5119 } 5120 v.reset(OpARM64MOVDreg) 5121 v.AddArg(x) 5122 return true 5123 } 5124 // match: (MOVWreg x:(MOVWload _ _)) 5125 // cond: 5126 // result: (MOVDreg x) 5127 for { 5128 x := v.Args[0] 5129 if x.Op != OpARM64MOVWload { 5130 break 5131 } 5132 v.reset(OpARM64MOVDreg) 5133 v.AddArg(x) 5134 return true 5135 } 5136 // match: (MOVWreg x:(MOVBreg _)) 5137 // cond: 5138 // result: (MOVDreg x) 5139 for { 5140 x := v.Args[0] 5141 if x.Op != OpARM64MOVBreg { 5142 break 5143 } 5144 v.reset(OpARM64MOVDreg) 5145 v.AddArg(x) 5146 return true 5147 } 5148 // match: (MOVWreg x:(MOVBUreg _)) 5149 // cond: 5150 // result: (MOVDreg x) 5151 for { 5152 x := v.Args[0] 5153 if x.Op != OpARM64MOVBUreg { 5154 break 5155 } 5156 v.reset(OpARM64MOVDreg) 5157 v.AddArg(x) 5158 return true 5159 } 5160 // match: (MOVWreg x:(MOVHreg _)) 5161 // cond: 5162 // result: (MOVDreg x) 5163 for { 5164 x := v.Args[0] 5165 if x.Op != OpARM64MOVHreg { 5166 break 5167 } 5168 v.reset(OpARM64MOVDreg) 5169 v.AddArg(x) 5170 return true 5171 } 5172 // match: (MOVWreg x:(MOVHreg _)) 5173 // cond: 5174 // result: (MOVDreg x) 5175 for { 5176 x := v.Args[0] 5177 if x.Op != OpARM64MOVHreg { 5178 break 5179 } 5180 v.reset(OpARM64MOVDreg) 5181 v.AddArg(x) 5182 return true 5183 } 5184 // match: (MOVWreg x:(MOVWreg _)) 5185 // cond: 5186 // result: (MOVDreg x) 5187 for { 5188 x := v.Args[0] 5189 if x.Op != OpARM64MOVWreg { 5190 break 5191 } 5192 v.reset(OpARM64MOVDreg) 5193 v.AddArg(x) 5194 return true 5195 } 5196 // match: (MOVWreg (MOVDconst [c])) 5197 // cond: 5198 // result: (MOVDconst [int64(int32(c))]) 5199 for { 5200 v_0 := v.Args[0] 5201 if v_0.Op != OpARM64MOVDconst { 5202 break 5203 } 5204 c := v_0.AuxInt 5205 v.reset(OpARM64MOVDconst) 5206 v.AuxInt = int64(int32(c)) 5207 return true 5208 } 5209 return false 5210 } 5211 func rewriteValueARM64_OpARM64MOVWstore(v *Value, config *Config) bool { 5212 b := v.Block 5213 _ = b 5214 // match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5215 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 5216 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 5217 for { 5218 off1 := v.AuxInt 5219 sym := v.Aux 5220 v_0 := v.Args[0] 5221 if v_0.Op != OpARM64ADDconst { 5222 break 5223 } 5224 off2 := v_0.AuxInt 5225 ptr := v_0.Args[0] 5226 val := v.Args[1] 5227 mem := v.Args[2] 5228 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 5229 break 5230 } 5231 v.reset(OpARM64MOVWstore) 5232 v.AuxInt = off1 + off2 5233 v.Aux = sym 5234 v.AddArg(ptr) 5235 v.AddArg(val) 5236 v.AddArg(mem) 5237 return true 5238 } 5239 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5240 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 5241 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5242 for { 5243 off1 := v.AuxInt 5244 sym1 := v.Aux 5245 v_0 := v.Args[0] 5246 if v_0.Op != OpARM64MOVDaddr { 5247 break 5248 } 5249 off2 := v_0.AuxInt 5250 sym2 := v_0.Aux 5251 ptr := v_0.Args[0] 5252 val := v.Args[1] 5253 mem := v.Args[2] 5254 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 5255 break 5256 } 5257 v.reset(OpARM64MOVWstore) 5258 v.AuxInt = off1 + off2 5259 v.Aux = mergeSym(sym1, sym2) 5260 v.AddArg(ptr) 5261 v.AddArg(val) 5262 v.AddArg(mem) 5263 return true 5264 } 5265 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 5266 // cond: 5267 // result: (MOVWstorezero [off] {sym} ptr mem) 5268 for { 5269 off := v.AuxInt 5270 sym := v.Aux 5271 ptr := v.Args[0] 5272 v_1 := v.Args[1] 5273 if v_1.Op != OpARM64MOVDconst { 5274 break 5275 } 5276 if v_1.AuxInt != 0 { 5277 break 5278 } 5279 mem := v.Args[2] 5280 v.reset(OpARM64MOVWstorezero) 5281 v.AuxInt = off 5282 v.Aux = sym 5283 v.AddArg(ptr) 5284 v.AddArg(mem) 5285 return true 5286 } 5287 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5288 // cond: 5289 // result: (MOVWstore [off] {sym} ptr x mem) 5290 for { 5291 off := v.AuxInt 5292 sym := v.Aux 5293 ptr := v.Args[0] 5294 v_1 := v.Args[1] 5295 if v_1.Op != OpARM64MOVWreg { 5296 break 5297 } 5298 x := v_1.Args[0] 5299 mem := v.Args[2] 5300 v.reset(OpARM64MOVWstore) 5301 v.AuxInt = off 5302 v.Aux = sym 5303 v.AddArg(ptr) 5304 v.AddArg(x) 5305 v.AddArg(mem) 5306 return true 5307 } 5308 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 5309 // cond: 5310 // result: (MOVWstore [off] {sym} ptr x mem) 5311 for { 5312 off := v.AuxInt 5313 sym := v.Aux 5314 ptr := v.Args[0] 5315 v_1 := v.Args[1] 5316 if v_1.Op != OpARM64MOVWUreg { 5317 break 5318 } 5319 x := v_1.Args[0] 5320 mem := v.Args[2] 5321 v.reset(OpARM64MOVWstore) 5322 v.AuxInt = off 5323 v.Aux = sym 5324 v.AddArg(ptr) 5325 v.AddArg(x) 5326 v.AddArg(mem) 5327 return true 5328 } 5329 return false 5330 } 5331 func rewriteValueARM64_OpARM64MOVWstorezero(v *Value, config *Config) bool { 5332 b := v.Block 5333 _ = b 5334 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 5335 // cond: is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)) 5336 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5337 for { 5338 off1 := v.AuxInt 5339 sym := v.Aux 5340 v_0 := v.Args[0] 5341 if v_0.Op != OpARM64ADDconst { 5342 break 5343 } 5344 off2 := v_0.AuxInt 5345 ptr := v_0.Args[0] 5346 mem := v.Args[1] 5347 if !(is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym))) { 5348 break 5349 } 5350 v.reset(OpARM64MOVWstorezero) 5351 v.AuxInt = off1 + off2 5352 v.Aux = sym 5353 v.AddArg(ptr) 5354 v.AddArg(mem) 5355 return true 5356 } 5357 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5358 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1)) 5359 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5360 for { 5361 off1 := v.AuxInt 5362 sym1 := v.Aux 5363 v_0 := v.Args[0] 5364 if v_0.Op != OpARM64MOVDaddr { 5365 break 5366 } 5367 off2 := v_0.AuxInt 5368 sym2 := v_0.Aux 5369 ptr := v_0.Args[0] 5370 mem := v.Args[1] 5371 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) { 5372 break 5373 } 5374 v.reset(OpARM64MOVWstorezero) 5375 v.AuxInt = off1 + off2 5376 v.Aux = mergeSym(sym1, sym2) 5377 v.AddArg(ptr) 5378 v.AddArg(mem) 5379 return true 5380 } 5381 return false 5382 } 5383 func rewriteValueARM64_OpARM64MUL(v *Value, config *Config) bool { 5384 b := v.Block 5385 _ = b 5386 // match: (MUL x (MOVDconst [-1])) 5387 // cond: 5388 // result: (NEG x) 5389 for { 5390 x := v.Args[0] 5391 v_1 := v.Args[1] 5392 if v_1.Op != OpARM64MOVDconst { 5393 break 5394 } 5395 if v_1.AuxInt != -1 { 5396 break 5397 } 5398 v.reset(OpARM64NEG) 5399 v.AddArg(x) 5400 return true 5401 } 5402 // match: (MUL _ (MOVDconst [0])) 5403 // cond: 5404 // result: (MOVDconst [0]) 5405 for { 5406 v_1 := v.Args[1] 5407 if v_1.Op != OpARM64MOVDconst { 5408 break 5409 } 5410 if v_1.AuxInt != 0 { 5411 break 5412 } 5413 v.reset(OpARM64MOVDconst) 5414 v.AuxInt = 0 5415 return true 5416 } 5417 // match: (MUL x (MOVDconst [1])) 5418 // cond: 5419 // result: x 5420 for { 5421 x := v.Args[0] 5422 v_1 := v.Args[1] 5423 if v_1.Op != OpARM64MOVDconst { 5424 break 5425 } 5426 if v_1.AuxInt != 1 { 5427 break 5428 } 5429 v.reset(OpCopy) 5430 v.Type = x.Type 5431 v.AddArg(x) 5432 return true 5433 } 5434 // match: (MUL x (MOVDconst [c])) 5435 // cond: isPowerOfTwo(c) 5436 // result: (SLLconst [log2(c)] x) 5437 for { 5438 x := v.Args[0] 5439 v_1 := v.Args[1] 5440 if v_1.Op != OpARM64MOVDconst { 5441 break 5442 } 5443 c := v_1.AuxInt 5444 if !(isPowerOfTwo(c)) { 5445 break 5446 } 5447 v.reset(OpARM64SLLconst) 5448 v.AuxInt = log2(c) 5449 v.AddArg(x) 5450 return true 5451 } 5452 // match: (MUL x (MOVDconst [c])) 5453 // cond: isPowerOfTwo(c-1) && c >= 3 5454 // result: (ADDshiftLL x x [log2(c-1)]) 5455 for { 5456 x := v.Args[0] 5457 v_1 := v.Args[1] 5458 if v_1.Op != OpARM64MOVDconst { 5459 break 5460 } 5461 c := v_1.AuxInt 5462 if !(isPowerOfTwo(c-1) && c >= 3) { 5463 break 5464 } 5465 v.reset(OpARM64ADDshiftLL) 5466 v.AuxInt = log2(c - 1) 5467 v.AddArg(x) 5468 v.AddArg(x) 5469 return true 5470 } 5471 // match: (MUL x (MOVDconst [c])) 5472 // cond: isPowerOfTwo(c+1) && c >= 7 5473 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5474 for { 5475 x := v.Args[0] 5476 v_1 := v.Args[1] 5477 if v_1.Op != OpARM64MOVDconst { 5478 break 5479 } 5480 c := v_1.AuxInt 5481 if !(isPowerOfTwo(c+1) && c >= 7) { 5482 break 5483 } 5484 v.reset(OpARM64ADDshiftLL) 5485 v.AuxInt = log2(c + 1) 5486 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5487 v0.AddArg(x) 5488 v.AddArg(v0) 5489 v.AddArg(x) 5490 return true 5491 } 5492 // match: (MUL x (MOVDconst [c])) 5493 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5494 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5495 for { 5496 x := v.Args[0] 5497 v_1 := v.Args[1] 5498 if v_1.Op != OpARM64MOVDconst { 5499 break 5500 } 5501 c := v_1.AuxInt 5502 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5503 break 5504 } 5505 v.reset(OpARM64SLLconst) 5506 v.AuxInt = log2(c / 3) 5507 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5508 v0.AuxInt = 1 5509 v0.AddArg(x) 5510 v0.AddArg(x) 5511 v.AddArg(v0) 5512 return true 5513 } 5514 // match: (MUL x (MOVDconst [c])) 5515 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5516 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5517 for { 5518 x := v.Args[0] 5519 v_1 := v.Args[1] 5520 if v_1.Op != OpARM64MOVDconst { 5521 break 5522 } 5523 c := v_1.AuxInt 5524 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5525 break 5526 } 5527 v.reset(OpARM64SLLconst) 5528 v.AuxInt = log2(c / 5) 5529 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5530 v0.AuxInt = 2 5531 v0.AddArg(x) 5532 v0.AddArg(x) 5533 v.AddArg(v0) 5534 return true 5535 } 5536 // match: (MUL x (MOVDconst [c])) 5537 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5538 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5539 for { 5540 x := v.Args[0] 5541 v_1 := v.Args[1] 5542 if v_1.Op != OpARM64MOVDconst { 5543 break 5544 } 5545 c := v_1.AuxInt 5546 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5547 break 5548 } 5549 v.reset(OpARM64SLLconst) 5550 v.AuxInt = log2(c / 7) 5551 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5552 v0.AuxInt = 3 5553 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5554 v1.AddArg(x) 5555 v0.AddArg(v1) 5556 v0.AddArg(x) 5557 v.AddArg(v0) 5558 return true 5559 } 5560 // match: (MUL x (MOVDconst [c])) 5561 // cond: c%9 == 0 && isPowerOfTwo(c/9) 5562 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 5563 for { 5564 x := v.Args[0] 5565 v_1 := v.Args[1] 5566 if v_1.Op != OpARM64MOVDconst { 5567 break 5568 } 5569 c := v_1.AuxInt 5570 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 5571 break 5572 } 5573 v.reset(OpARM64SLLconst) 5574 v.AuxInt = log2(c / 9) 5575 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5576 v0.AuxInt = 3 5577 v0.AddArg(x) 5578 v0.AddArg(x) 5579 v.AddArg(v0) 5580 return true 5581 } 5582 // match: (MUL (MOVDconst [-1]) x) 5583 // cond: 5584 // result: (NEG x) 5585 for { 5586 v_0 := v.Args[0] 5587 if v_0.Op != OpARM64MOVDconst { 5588 break 5589 } 5590 if v_0.AuxInt != -1 { 5591 break 5592 } 5593 x := v.Args[1] 5594 v.reset(OpARM64NEG) 5595 v.AddArg(x) 5596 return true 5597 } 5598 // match: (MUL (MOVDconst [0]) _) 5599 // cond: 5600 // result: (MOVDconst [0]) 5601 for { 5602 v_0 := v.Args[0] 5603 if v_0.Op != OpARM64MOVDconst { 5604 break 5605 } 5606 if v_0.AuxInt != 0 { 5607 break 5608 } 5609 v.reset(OpARM64MOVDconst) 5610 v.AuxInt = 0 5611 return true 5612 } 5613 // match: (MUL (MOVDconst [1]) x) 5614 // cond: 5615 // result: x 5616 for { 5617 v_0 := v.Args[0] 5618 if v_0.Op != OpARM64MOVDconst { 5619 break 5620 } 5621 if v_0.AuxInt != 1 { 5622 break 5623 } 5624 x := v.Args[1] 5625 v.reset(OpCopy) 5626 v.Type = x.Type 5627 v.AddArg(x) 5628 return true 5629 } 5630 // match: (MUL (MOVDconst [c]) x) 5631 // cond: isPowerOfTwo(c) 5632 // result: (SLLconst [log2(c)] x) 5633 for { 5634 v_0 := v.Args[0] 5635 if v_0.Op != OpARM64MOVDconst { 5636 break 5637 } 5638 c := v_0.AuxInt 5639 x := v.Args[1] 5640 if !(isPowerOfTwo(c)) { 5641 break 5642 } 5643 v.reset(OpARM64SLLconst) 5644 v.AuxInt = log2(c) 5645 v.AddArg(x) 5646 return true 5647 } 5648 // match: (MUL (MOVDconst [c]) x) 5649 // cond: isPowerOfTwo(c) 5650 // result: (SLLconst [log2(c)] x) 5651 for { 5652 v_0 := v.Args[0] 5653 if v_0.Op != OpARM64MOVDconst { 5654 break 5655 } 5656 c := v_0.AuxInt 5657 x := v.Args[1] 5658 if !(isPowerOfTwo(c)) { 5659 break 5660 } 5661 v.reset(OpARM64SLLconst) 5662 v.AuxInt = log2(c) 5663 v.AddArg(x) 5664 return true 5665 } 5666 // match: (MUL (MOVDconst [c]) x) 5667 // cond: isPowerOfTwo(c-1) && c >= 3 5668 // result: (ADDshiftLL x x [log2(c-1)]) 5669 for { 5670 v_0 := v.Args[0] 5671 if v_0.Op != OpARM64MOVDconst { 5672 break 5673 } 5674 c := v_0.AuxInt 5675 x := v.Args[1] 5676 if !(isPowerOfTwo(c-1) && c >= 3) { 5677 break 5678 } 5679 v.reset(OpARM64ADDshiftLL) 5680 v.AuxInt = log2(c - 1) 5681 v.AddArg(x) 5682 v.AddArg(x) 5683 return true 5684 } 5685 // match: (MUL (MOVDconst [c]) x) 5686 // cond: isPowerOfTwo(c+1) && c >= 7 5687 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5688 for { 5689 v_0 := v.Args[0] 5690 if v_0.Op != OpARM64MOVDconst { 5691 break 5692 } 5693 c := v_0.AuxInt 5694 x := v.Args[1] 5695 if !(isPowerOfTwo(c+1) && c >= 7) { 5696 break 5697 } 5698 v.reset(OpARM64ADDshiftLL) 5699 v.AuxInt = log2(c + 1) 5700 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5701 v0.AddArg(x) 5702 v.AddArg(v0) 5703 v.AddArg(x) 5704 return true 5705 } 5706 // match: (MUL (MOVDconst [c]) x) 5707 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5708 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5709 for { 5710 v_0 := v.Args[0] 5711 if v_0.Op != OpARM64MOVDconst { 5712 break 5713 } 5714 c := v_0.AuxInt 5715 x := v.Args[1] 5716 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5717 break 5718 } 5719 v.reset(OpARM64SLLconst) 5720 v.AuxInt = log2(c / 3) 5721 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5722 v0.AuxInt = 1 5723 v0.AddArg(x) 5724 v0.AddArg(x) 5725 v.AddArg(v0) 5726 return true 5727 } 5728 // match: (MUL (MOVDconst [c]) x) 5729 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5730 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5731 for { 5732 v_0 := v.Args[0] 5733 if v_0.Op != OpARM64MOVDconst { 5734 break 5735 } 5736 c := v_0.AuxInt 5737 x := v.Args[1] 5738 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5739 break 5740 } 5741 v.reset(OpARM64SLLconst) 5742 v.AuxInt = log2(c / 5) 5743 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5744 v0.AuxInt = 2 5745 v0.AddArg(x) 5746 v0.AddArg(x) 5747 v.AddArg(v0) 5748 return true 5749 } 5750 // match: (MUL (MOVDconst [c]) x) 5751 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5752 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5753 for { 5754 v_0 := v.Args[0] 5755 if v_0.Op != OpARM64MOVDconst { 5756 break 5757 } 5758 c := v_0.AuxInt 5759 x := v.Args[1] 5760 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5761 break 5762 } 5763 v.reset(OpARM64SLLconst) 5764 v.AuxInt = log2(c / 7) 5765 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5766 v0.AuxInt = 3 5767 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5768 v1.AddArg(x) 5769 v0.AddArg(v1) 5770 v0.AddArg(x) 5771 v.AddArg(v0) 5772 return true 5773 } 5774 // match: (MUL (MOVDconst [c]) x) 5775 // cond: c%9 == 0 && isPowerOfTwo(c/9) 5776 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 5777 for { 5778 v_0 := v.Args[0] 5779 if v_0.Op != OpARM64MOVDconst { 5780 break 5781 } 5782 c := v_0.AuxInt 5783 x := v.Args[1] 5784 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 5785 break 5786 } 5787 v.reset(OpARM64SLLconst) 5788 v.AuxInt = log2(c / 9) 5789 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5790 v0.AuxInt = 3 5791 v0.AddArg(x) 5792 v0.AddArg(x) 5793 v.AddArg(v0) 5794 return true 5795 } 5796 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 5797 // cond: 5798 // result: (MOVDconst [c*d]) 5799 for { 5800 v_0 := v.Args[0] 5801 if v_0.Op != OpARM64MOVDconst { 5802 break 5803 } 5804 c := v_0.AuxInt 5805 v_1 := v.Args[1] 5806 if v_1.Op != OpARM64MOVDconst { 5807 break 5808 } 5809 d := v_1.AuxInt 5810 v.reset(OpARM64MOVDconst) 5811 v.AuxInt = c * d 5812 return true 5813 } 5814 return false 5815 } 5816 func rewriteValueARM64_OpARM64MULW(v *Value, config *Config) bool { 5817 b := v.Block 5818 _ = b 5819 // match: (MULW x (MOVDconst [c])) 5820 // cond: int32(c)==-1 5821 // result: (NEG x) 5822 for { 5823 x := v.Args[0] 5824 v_1 := v.Args[1] 5825 if v_1.Op != OpARM64MOVDconst { 5826 break 5827 } 5828 c := v_1.AuxInt 5829 if !(int32(c) == -1) { 5830 break 5831 } 5832 v.reset(OpARM64NEG) 5833 v.AddArg(x) 5834 return true 5835 } 5836 // match: (MULW _ (MOVDconst [c])) 5837 // cond: int32(c)==0 5838 // result: (MOVDconst [0]) 5839 for { 5840 v_1 := v.Args[1] 5841 if v_1.Op != OpARM64MOVDconst { 5842 break 5843 } 5844 c := v_1.AuxInt 5845 if !(int32(c) == 0) { 5846 break 5847 } 5848 v.reset(OpARM64MOVDconst) 5849 v.AuxInt = 0 5850 return true 5851 } 5852 // match: (MULW x (MOVDconst [c])) 5853 // cond: int32(c)==1 5854 // result: x 5855 for { 5856 x := v.Args[0] 5857 v_1 := v.Args[1] 5858 if v_1.Op != OpARM64MOVDconst { 5859 break 5860 } 5861 c := v_1.AuxInt 5862 if !(int32(c) == 1) { 5863 break 5864 } 5865 v.reset(OpCopy) 5866 v.Type = x.Type 5867 v.AddArg(x) 5868 return true 5869 } 5870 // match: (MULW x (MOVDconst [c])) 5871 // cond: isPowerOfTwo(c) 5872 // result: (SLLconst [log2(c)] x) 5873 for { 5874 x := v.Args[0] 5875 v_1 := v.Args[1] 5876 if v_1.Op != OpARM64MOVDconst { 5877 break 5878 } 5879 c := v_1.AuxInt 5880 if !(isPowerOfTwo(c)) { 5881 break 5882 } 5883 v.reset(OpARM64SLLconst) 5884 v.AuxInt = log2(c) 5885 v.AddArg(x) 5886 return true 5887 } 5888 // match: (MULW x (MOVDconst [c])) 5889 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 5890 // result: (ADDshiftLL x x [log2(c-1)]) 5891 for { 5892 x := v.Args[0] 5893 v_1 := v.Args[1] 5894 if v_1.Op != OpARM64MOVDconst { 5895 break 5896 } 5897 c := v_1.AuxInt 5898 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 5899 break 5900 } 5901 v.reset(OpARM64ADDshiftLL) 5902 v.AuxInt = log2(c - 1) 5903 v.AddArg(x) 5904 v.AddArg(x) 5905 return true 5906 } 5907 // match: (MULW x (MOVDconst [c])) 5908 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 5909 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5910 for { 5911 x := v.Args[0] 5912 v_1 := v.Args[1] 5913 if v_1.Op != OpARM64MOVDconst { 5914 break 5915 } 5916 c := v_1.AuxInt 5917 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 5918 break 5919 } 5920 v.reset(OpARM64ADDshiftLL) 5921 v.AuxInt = log2(c + 1) 5922 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5923 v0.AddArg(x) 5924 v.AddArg(v0) 5925 v.AddArg(x) 5926 return true 5927 } 5928 // match: (MULW x (MOVDconst [c])) 5929 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 5930 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5931 for { 5932 x := v.Args[0] 5933 v_1 := v.Args[1] 5934 if v_1.Op != OpARM64MOVDconst { 5935 break 5936 } 5937 c := v_1.AuxInt 5938 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 5939 break 5940 } 5941 v.reset(OpARM64SLLconst) 5942 v.AuxInt = log2(c / 3) 5943 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5944 v0.AuxInt = 1 5945 v0.AddArg(x) 5946 v0.AddArg(x) 5947 v.AddArg(v0) 5948 return true 5949 } 5950 // match: (MULW x (MOVDconst [c])) 5951 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 5952 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5953 for { 5954 x := v.Args[0] 5955 v_1 := v.Args[1] 5956 if v_1.Op != OpARM64MOVDconst { 5957 break 5958 } 5959 c := v_1.AuxInt 5960 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 5961 break 5962 } 5963 v.reset(OpARM64SLLconst) 5964 v.AuxInt = log2(c / 5) 5965 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5966 v0.AuxInt = 2 5967 v0.AddArg(x) 5968 v0.AddArg(x) 5969 v.AddArg(v0) 5970 return true 5971 } 5972 // match: (MULW x (MOVDconst [c])) 5973 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 5974 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5975 for { 5976 x := v.Args[0] 5977 v_1 := v.Args[1] 5978 if v_1.Op != OpARM64MOVDconst { 5979 break 5980 } 5981 c := v_1.AuxInt 5982 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 5983 break 5984 } 5985 v.reset(OpARM64SLLconst) 5986 v.AuxInt = log2(c / 7) 5987 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5988 v0.AuxInt = 3 5989 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5990 v1.AddArg(x) 5991 v0.AddArg(v1) 5992 v0.AddArg(x) 5993 v.AddArg(v0) 5994 return true 5995 } 5996 // match: (MULW x (MOVDconst [c])) 5997 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 5998 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 5999 for { 6000 x := v.Args[0] 6001 v_1 := v.Args[1] 6002 if v_1.Op != OpARM64MOVDconst { 6003 break 6004 } 6005 c := v_1.AuxInt 6006 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6007 break 6008 } 6009 v.reset(OpARM64SLLconst) 6010 v.AuxInt = log2(c / 9) 6011 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6012 v0.AuxInt = 3 6013 v0.AddArg(x) 6014 v0.AddArg(x) 6015 v.AddArg(v0) 6016 return true 6017 } 6018 // match: (MULW (MOVDconst [c]) x) 6019 // cond: int32(c)==-1 6020 // result: (NEG x) 6021 for { 6022 v_0 := v.Args[0] 6023 if v_0.Op != OpARM64MOVDconst { 6024 break 6025 } 6026 c := v_0.AuxInt 6027 x := v.Args[1] 6028 if !(int32(c) == -1) { 6029 break 6030 } 6031 v.reset(OpARM64NEG) 6032 v.AddArg(x) 6033 return true 6034 } 6035 // match: (MULW (MOVDconst [c]) _) 6036 // cond: int32(c)==0 6037 // result: (MOVDconst [0]) 6038 for { 6039 v_0 := v.Args[0] 6040 if v_0.Op != OpARM64MOVDconst { 6041 break 6042 } 6043 c := v_0.AuxInt 6044 if !(int32(c) == 0) { 6045 break 6046 } 6047 v.reset(OpARM64MOVDconst) 6048 v.AuxInt = 0 6049 return true 6050 } 6051 // match: (MULW (MOVDconst [c]) x) 6052 // cond: int32(c)==1 6053 // result: x 6054 for { 6055 v_0 := v.Args[0] 6056 if v_0.Op != OpARM64MOVDconst { 6057 break 6058 } 6059 c := v_0.AuxInt 6060 x := v.Args[1] 6061 if !(int32(c) == 1) { 6062 break 6063 } 6064 v.reset(OpCopy) 6065 v.Type = x.Type 6066 v.AddArg(x) 6067 return true 6068 } 6069 // match: (MULW (MOVDconst [c]) x) 6070 // cond: isPowerOfTwo(c) 6071 // result: (SLLconst [log2(c)] x) 6072 for { 6073 v_0 := v.Args[0] 6074 if v_0.Op != OpARM64MOVDconst { 6075 break 6076 } 6077 c := v_0.AuxInt 6078 x := v.Args[1] 6079 if !(isPowerOfTwo(c)) { 6080 break 6081 } 6082 v.reset(OpARM64SLLconst) 6083 v.AuxInt = log2(c) 6084 v.AddArg(x) 6085 return true 6086 } 6087 // match: (MULW (MOVDconst [c]) x) 6088 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 6089 // result: (ADDshiftLL x x [log2(c-1)]) 6090 for { 6091 v_0 := v.Args[0] 6092 if v_0.Op != OpARM64MOVDconst { 6093 break 6094 } 6095 c := v_0.AuxInt 6096 x := v.Args[1] 6097 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 6098 break 6099 } 6100 v.reset(OpARM64ADDshiftLL) 6101 v.AuxInt = log2(c - 1) 6102 v.AddArg(x) 6103 v.AddArg(x) 6104 return true 6105 } 6106 // match: (MULW (MOVDconst [c]) x) 6107 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 6108 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 6109 for { 6110 v_0 := v.Args[0] 6111 if v_0.Op != OpARM64MOVDconst { 6112 break 6113 } 6114 c := v_0.AuxInt 6115 x := v.Args[1] 6116 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 6117 break 6118 } 6119 v.reset(OpARM64ADDshiftLL) 6120 v.AuxInt = log2(c + 1) 6121 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6122 v0.AddArg(x) 6123 v.AddArg(v0) 6124 v.AddArg(x) 6125 return true 6126 } 6127 // match: (MULW (MOVDconst [c]) x) 6128 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 6129 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 6130 for { 6131 v_0 := v.Args[0] 6132 if v_0.Op != OpARM64MOVDconst { 6133 break 6134 } 6135 c := v_0.AuxInt 6136 x := v.Args[1] 6137 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 6138 break 6139 } 6140 v.reset(OpARM64SLLconst) 6141 v.AuxInt = log2(c / 3) 6142 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6143 v0.AuxInt = 1 6144 v0.AddArg(x) 6145 v0.AddArg(x) 6146 v.AddArg(v0) 6147 return true 6148 } 6149 // match: (MULW (MOVDconst [c]) x) 6150 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 6151 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 6152 for { 6153 v_0 := v.Args[0] 6154 if v_0.Op != OpARM64MOVDconst { 6155 break 6156 } 6157 c := v_0.AuxInt 6158 x := v.Args[1] 6159 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 6160 break 6161 } 6162 v.reset(OpARM64SLLconst) 6163 v.AuxInt = log2(c / 5) 6164 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6165 v0.AuxInt = 2 6166 v0.AddArg(x) 6167 v0.AddArg(x) 6168 v.AddArg(v0) 6169 return true 6170 } 6171 // match: (MULW (MOVDconst [c]) x) 6172 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 6173 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 6174 for { 6175 v_0 := v.Args[0] 6176 if v_0.Op != OpARM64MOVDconst { 6177 break 6178 } 6179 c := v_0.AuxInt 6180 x := v.Args[1] 6181 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 6182 break 6183 } 6184 v.reset(OpARM64SLLconst) 6185 v.AuxInt = log2(c / 7) 6186 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6187 v0.AuxInt = 3 6188 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6189 v1.AddArg(x) 6190 v0.AddArg(v1) 6191 v0.AddArg(x) 6192 v.AddArg(v0) 6193 return true 6194 } 6195 // match: (MULW (MOVDconst [c]) x) 6196 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 6197 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6198 for { 6199 v_0 := v.Args[0] 6200 if v_0.Op != OpARM64MOVDconst { 6201 break 6202 } 6203 c := v_0.AuxInt 6204 x := v.Args[1] 6205 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6206 break 6207 } 6208 v.reset(OpARM64SLLconst) 6209 v.AuxInt = log2(c / 9) 6210 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6211 v0.AuxInt = 3 6212 v0.AddArg(x) 6213 v0.AddArg(x) 6214 v.AddArg(v0) 6215 return true 6216 } 6217 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 6218 // cond: 6219 // result: (MOVDconst [int64(int32(c)*int32(d))]) 6220 for { 6221 v_0 := v.Args[0] 6222 if v_0.Op != OpARM64MOVDconst { 6223 break 6224 } 6225 c := v_0.AuxInt 6226 v_1 := v.Args[1] 6227 if v_1.Op != OpARM64MOVDconst { 6228 break 6229 } 6230 d := v_1.AuxInt 6231 v.reset(OpARM64MOVDconst) 6232 v.AuxInt = int64(int32(c) * int32(d)) 6233 return true 6234 } 6235 return false 6236 } 6237 func rewriteValueARM64_OpARM64MVN(v *Value, config *Config) bool { 6238 b := v.Block 6239 _ = b 6240 // match: (MVN (MOVDconst [c])) 6241 // cond: 6242 // result: (MOVDconst [^c]) 6243 for { 6244 v_0 := v.Args[0] 6245 if v_0.Op != OpARM64MOVDconst { 6246 break 6247 } 6248 c := v_0.AuxInt 6249 v.reset(OpARM64MOVDconst) 6250 v.AuxInt = ^c 6251 return true 6252 } 6253 return false 6254 } 6255 func rewriteValueARM64_OpARM64NEG(v *Value, config *Config) bool { 6256 b := v.Block 6257 _ = b 6258 // match: (NEG (MOVDconst [c])) 6259 // cond: 6260 // result: (MOVDconst [-c]) 6261 for { 6262 v_0 := v.Args[0] 6263 if v_0.Op != OpARM64MOVDconst { 6264 break 6265 } 6266 c := v_0.AuxInt 6267 v.reset(OpARM64MOVDconst) 6268 v.AuxInt = -c 6269 return true 6270 } 6271 return false 6272 } 6273 func rewriteValueARM64_OpARM64NotEqual(v *Value, config *Config) bool { 6274 b := v.Block 6275 _ = b 6276 // match: (NotEqual (FlagEQ)) 6277 // cond: 6278 // result: (MOVDconst [0]) 6279 for { 6280 v_0 := v.Args[0] 6281 if v_0.Op != OpARM64FlagEQ { 6282 break 6283 } 6284 v.reset(OpARM64MOVDconst) 6285 v.AuxInt = 0 6286 return true 6287 } 6288 // match: (NotEqual (FlagLT_ULT)) 6289 // cond: 6290 // result: (MOVDconst [1]) 6291 for { 6292 v_0 := v.Args[0] 6293 if v_0.Op != OpARM64FlagLT_ULT { 6294 break 6295 } 6296 v.reset(OpARM64MOVDconst) 6297 v.AuxInt = 1 6298 return true 6299 } 6300 // match: (NotEqual (FlagLT_UGT)) 6301 // cond: 6302 // result: (MOVDconst [1]) 6303 for { 6304 v_0 := v.Args[0] 6305 if v_0.Op != OpARM64FlagLT_UGT { 6306 break 6307 } 6308 v.reset(OpARM64MOVDconst) 6309 v.AuxInt = 1 6310 return true 6311 } 6312 // match: (NotEqual (FlagGT_ULT)) 6313 // cond: 6314 // result: (MOVDconst [1]) 6315 for { 6316 v_0 := v.Args[0] 6317 if v_0.Op != OpARM64FlagGT_ULT { 6318 break 6319 } 6320 v.reset(OpARM64MOVDconst) 6321 v.AuxInt = 1 6322 return true 6323 } 6324 // match: (NotEqual (FlagGT_UGT)) 6325 // cond: 6326 // result: (MOVDconst [1]) 6327 for { 6328 v_0 := v.Args[0] 6329 if v_0.Op != OpARM64FlagGT_UGT { 6330 break 6331 } 6332 v.reset(OpARM64MOVDconst) 6333 v.AuxInt = 1 6334 return true 6335 } 6336 // match: (NotEqual (InvertFlags x)) 6337 // cond: 6338 // result: (NotEqual x) 6339 for { 6340 v_0 := v.Args[0] 6341 if v_0.Op != OpARM64InvertFlags { 6342 break 6343 } 6344 x := v_0.Args[0] 6345 v.reset(OpARM64NotEqual) 6346 v.AddArg(x) 6347 return true 6348 } 6349 return false 6350 } 6351 func rewriteValueARM64_OpARM64OR(v *Value, config *Config) bool { 6352 b := v.Block 6353 _ = b 6354 // match: (OR (MOVDconst [c]) x) 6355 // cond: 6356 // result: (ORconst [c] x) 6357 for { 6358 v_0 := v.Args[0] 6359 if v_0.Op != OpARM64MOVDconst { 6360 break 6361 } 6362 c := v_0.AuxInt 6363 x := v.Args[1] 6364 v.reset(OpARM64ORconst) 6365 v.AuxInt = c 6366 v.AddArg(x) 6367 return true 6368 } 6369 // match: (OR x (MOVDconst [c])) 6370 // cond: 6371 // result: (ORconst [c] x) 6372 for { 6373 x := v.Args[0] 6374 v_1 := v.Args[1] 6375 if v_1.Op != OpARM64MOVDconst { 6376 break 6377 } 6378 c := v_1.AuxInt 6379 v.reset(OpARM64ORconst) 6380 v.AuxInt = c 6381 v.AddArg(x) 6382 return true 6383 } 6384 // match: (OR x x) 6385 // cond: 6386 // result: x 6387 for { 6388 x := v.Args[0] 6389 if x != v.Args[1] { 6390 break 6391 } 6392 v.reset(OpCopy) 6393 v.Type = x.Type 6394 v.AddArg(x) 6395 return true 6396 } 6397 // match: (OR x s:(SLLconst [c] y)) 6398 // cond: s.Uses == 1 && clobber(s) 6399 // result: (ORshiftLL x y [c]) 6400 for { 6401 x := v.Args[0] 6402 s := v.Args[1] 6403 if s.Op != OpARM64SLLconst { 6404 break 6405 } 6406 c := s.AuxInt 6407 y := s.Args[0] 6408 if !(s.Uses == 1 && clobber(s)) { 6409 break 6410 } 6411 v.reset(OpARM64ORshiftLL) 6412 v.AuxInt = c 6413 v.AddArg(x) 6414 v.AddArg(y) 6415 return true 6416 } 6417 // match: (OR s:(SLLconst [c] y) x) 6418 // cond: s.Uses == 1 && clobber(s) 6419 // result: (ORshiftLL x y [c]) 6420 for { 6421 s := v.Args[0] 6422 if s.Op != OpARM64SLLconst { 6423 break 6424 } 6425 c := s.AuxInt 6426 y := s.Args[0] 6427 x := v.Args[1] 6428 if !(s.Uses == 1 && clobber(s)) { 6429 break 6430 } 6431 v.reset(OpARM64ORshiftLL) 6432 v.AuxInt = c 6433 v.AddArg(x) 6434 v.AddArg(y) 6435 return true 6436 } 6437 // match: (OR x (SLLconst [c] y)) 6438 // cond: 6439 // result: (ORshiftLL x y [c]) 6440 for { 6441 x := v.Args[0] 6442 v_1 := v.Args[1] 6443 if v_1.Op != OpARM64SLLconst { 6444 break 6445 } 6446 c := v_1.AuxInt 6447 y := v_1.Args[0] 6448 v.reset(OpARM64ORshiftLL) 6449 v.AuxInt = c 6450 v.AddArg(x) 6451 v.AddArg(y) 6452 return true 6453 } 6454 // match: (OR (SLLconst [c] y) x) 6455 // cond: 6456 // result: (ORshiftLL x y [c]) 6457 for { 6458 v_0 := v.Args[0] 6459 if v_0.Op != OpARM64SLLconst { 6460 break 6461 } 6462 c := v_0.AuxInt 6463 y := v_0.Args[0] 6464 x := v.Args[1] 6465 v.reset(OpARM64ORshiftLL) 6466 v.AuxInt = c 6467 v.AddArg(x) 6468 v.AddArg(y) 6469 return true 6470 } 6471 // match: (OR x (SRLconst [c] y)) 6472 // cond: 6473 // result: (ORshiftRL x y [c]) 6474 for { 6475 x := v.Args[0] 6476 v_1 := v.Args[1] 6477 if v_1.Op != OpARM64SRLconst { 6478 break 6479 } 6480 c := v_1.AuxInt 6481 y := v_1.Args[0] 6482 v.reset(OpARM64ORshiftRL) 6483 v.AuxInt = c 6484 v.AddArg(x) 6485 v.AddArg(y) 6486 return true 6487 } 6488 // match: (OR (SRLconst [c] y) x) 6489 // cond: 6490 // result: (ORshiftRL x y [c]) 6491 for { 6492 v_0 := v.Args[0] 6493 if v_0.Op != OpARM64SRLconst { 6494 break 6495 } 6496 c := v_0.AuxInt 6497 y := v_0.Args[0] 6498 x := v.Args[1] 6499 v.reset(OpARM64ORshiftRL) 6500 v.AuxInt = c 6501 v.AddArg(x) 6502 v.AddArg(y) 6503 return true 6504 } 6505 // match: (OR x (SRAconst [c] y)) 6506 // cond: 6507 // result: (ORshiftRA x y [c]) 6508 for { 6509 x := v.Args[0] 6510 v_1 := v.Args[1] 6511 if v_1.Op != OpARM64SRAconst { 6512 break 6513 } 6514 c := v_1.AuxInt 6515 y := v_1.Args[0] 6516 v.reset(OpARM64ORshiftRA) 6517 v.AuxInt = c 6518 v.AddArg(x) 6519 v.AddArg(y) 6520 return true 6521 } 6522 // match: (OR (SRAconst [c] y) x) 6523 // cond: 6524 // result: (ORshiftRA x y [c]) 6525 for { 6526 v_0 := v.Args[0] 6527 if v_0.Op != OpARM64SRAconst { 6528 break 6529 } 6530 c := v_0.AuxInt 6531 y := v_0.Args[0] 6532 x := v.Args[1] 6533 v.reset(OpARM64ORshiftRA) 6534 v.AuxInt = c 6535 v.AddArg(x) 6536 v.AddArg(y) 6537 return true 6538 } 6539 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem))) 6540 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6541 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i-3] p) mem) 6542 for { 6543 t := v.Type 6544 o0 := v.Args[0] 6545 if o0.Op != OpARM64ORshiftLL { 6546 break 6547 } 6548 if o0.AuxInt != 8 { 6549 break 6550 } 6551 o1 := o0.Args[0] 6552 if o1.Op != OpARM64ORshiftLL { 6553 break 6554 } 6555 if o1.AuxInt != 16 { 6556 break 6557 } 6558 s0 := o1.Args[0] 6559 if s0.Op != OpARM64SLLconst { 6560 break 6561 } 6562 if s0.AuxInt != 24 { 6563 break 6564 } 6565 y0 := s0.Args[0] 6566 if y0.Op != OpARM64MOVDnop { 6567 break 6568 } 6569 x0 := y0.Args[0] 6570 if x0.Op != OpARM64MOVBUload { 6571 break 6572 } 6573 i := x0.AuxInt 6574 s := x0.Aux 6575 p := x0.Args[0] 6576 mem := x0.Args[1] 6577 y1 := o1.Args[1] 6578 if y1.Op != OpARM64MOVDnop { 6579 break 6580 } 6581 x1 := y1.Args[0] 6582 if x1.Op != OpARM64MOVBUload { 6583 break 6584 } 6585 if x1.AuxInt != i-1 { 6586 break 6587 } 6588 if x1.Aux != s { 6589 break 6590 } 6591 if p != x1.Args[0] { 6592 break 6593 } 6594 if mem != x1.Args[1] { 6595 break 6596 } 6597 y2 := o0.Args[1] 6598 if y2.Op != OpARM64MOVDnop { 6599 break 6600 } 6601 x2 := y2.Args[0] 6602 if x2.Op != OpARM64MOVBUload { 6603 break 6604 } 6605 if x2.AuxInt != i-2 { 6606 break 6607 } 6608 if x2.Aux != s { 6609 break 6610 } 6611 if p != x2.Args[0] { 6612 break 6613 } 6614 if mem != x2.Args[1] { 6615 break 6616 } 6617 y3 := v.Args[1] 6618 if y3.Op != OpARM64MOVDnop { 6619 break 6620 } 6621 x3 := y3.Args[0] 6622 if x3.Op != OpARM64MOVBUload { 6623 break 6624 } 6625 if x3.AuxInt != i-3 { 6626 break 6627 } 6628 if x3.Aux != s { 6629 break 6630 } 6631 if p != x3.Args[0] { 6632 break 6633 } 6634 if mem != x3.Args[1] { 6635 break 6636 } 6637 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 6638 break 6639 } 6640 b = mergePoint(b, x0, x1, x2, x3) 6641 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 6642 v.reset(OpCopy) 6643 v.AddArg(v0) 6644 v0.Aux = s 6645 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6646 v1.AuxInt = i - 3 6647 v1.AddArg(p) 6648 v0.AddArg(v1) 6649 v0.AddArg(mem) 6650 return true 6651 } 6652 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i-4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i-5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i-6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i-7] {s} p mem))) 6653 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 6654 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i-7] p) mem)) 6655 for { 6656 t := v.Type 6657 o0 := v.Args[0] 6658 if o0.Op != OpARM64ORshiftLL { 6659 break 6660 } 6661 if o0.AuxInt != 8 { 6662 break 6663 } 6664 o1 := o0.Args[0] 6665 if o1.Op != OpARM64ORshiftLL { 6666 break 6667 } 6668 if o1.AuxInt != 16 { 6669 break 6670 } 6671 o2 := o1.Args[0] 6672 if o2.Op != OpARM64ORshiftLL { 6673 break 6674 } 6675 if o2.AuxInt != 24 { 6676 break 6677 } 6678 o3 := o2.Args[0] 6679 if o3.Op != OpARM64ORshiftLL { 6680 break 6681 } 6682 if o3.AuxInt != 32 { 6683 break 6684 } 6685 o4 := o3.Args[0] 6686 if o4.Op != OpARM64ORshiftLL { 6687 break 6688 } 6689 if o4.AuxInt != 40 { 6690 break 6691 } 6692 o5 := o4.Args[0] 6693 if o5.Op != OpARM64ORshiftLL { 6694 break 6695 } 6696 if o5.AuxInt != 48 { 6697 break 6698 } 6699 s0 := o5.Args[0] 6700 if s0.Op != OpARM64SLLconst { 6701 break 6702 } 6703 if s0.AuxInt != 56 { 6704 break 6705 } 6706 y0 := s0.Args[0] 6707 if y0.Op != OpARM64MOVDnop { 6708 break 6709 } 6710 x0 := y0.Args[0] 6711 if x0.Op != OpARM64MOVBUload { 6712 break 6713 } 6714 i := x0.AuxInt 6715 s := x0.Aux 6716 p := x0.Args[0] 6717 mem := x0.Args[1] 6718 y1 := o5.Args[1] 6719 if y1.Op != OpARM64MOVDnop { 6720 break 6721 } 6722 x1 := y1.Args[0] 6723 if x1.Op != OpARM64MOVBUload { 6724 break 6725 } 6726 if x1.AuxInt != i-1 { 6727 break 6728 } 6729 if x1.Aux != s { 6730 break 6731 } 6732 if p != x1.Args[0] { 6733 break 6734 } 6735 if mem != x1.Args[1] { 6736 break 6737 } 6738 y2 := o4.Args[1] 6739 if y2.Op != OpARM64MOVDnop { 6740 break 6741 } 6742 x2 := y2.Args[0] 6743 if x2.Op != OpARM64MOVBUload { 6744 break 6745 } 6746 if x2.AuxInt != i-2 { 6747 break 6748 } 6749 if x2.Aux != s { 6750 break 6751 } 6752 if p != x2.Args[0] { 6753 break 6754 } 6755 if mem != x2.Args[1] { 6756 break 6757 } 6758 y3 := o3.Args[1] 6759 if y3.Op != OpARM64MOVDnop { 6760 break 6761 } 6762 x3 := y3.Args[0] 6763 if x3.Op != OpARM64MOVBUload { 6764 break 6765 } 6766 if x3.AuxInt != i-3 { 6767 break 6768 } 6769 if x3.Aux != s { 6770 break 6771 } 6772 if p != x3.Args[0] { 6773 break 6774 } 6775 if mem != x3.Args[1] { 6776 break 6777 } 6778 y4 := o2.Args[1] 6779 if y4.Op != OpARM64MOVDnop { 6780 break 6781 } 6782 x4 := y4.Args[0] 6783 if x4.Op != OpARM64MOVBUload { 6784 break 6785 } 6786 if x4.AuxInt != i-4 { 6787 break 6788 } 6789 if x4.Aux != s { 6790 break 6791 } 6792 if p != x4.Args[0] { 6793 break 6794 } 6795 if mem != x4.Args[1] { 6796 break 6797 } 6798 y5 := o1.Args[1] 6799 if y5.Op != OpARM64MOVDnop { 6800 break 6801 } 6802 x5 := y5.Args[0] 6803 if x5.Op != OpARM64MOVBUload { 6804 break 6805 } 6806 if x5.AuxInt != i-5 { 6807 break 6808 } 6809 if x5.Aux != s { 6810 break 6811 } 6812 if p != x5.Args[0] { 6813 break 6814 } 6815 if mem != x5.Args[1] { 6816 break 6817 } 6818 y6 := o0.Args[1] 6819 if y6.Op != OpARM64MOVDnop { 6820 break 6821 } 6822 x6 := y6.Args[0] 6823 if x6.Op != OpARM64MOVBUload { 6824 break 6825 } 6826 if x6.AuxInt != i-6 { 6827 break 6828 } 6829 if x6.Aux != s { 6830 break 6831 } 6832 if p != x6.Args[0] { 6833 break 6834 } 6835 if mem != x6.Args[1] { 6836 break 6837 } 6838 y7 := v.Args[1] 6839 if y7.Op != OpARM64MOVDnop { 6840 break 6841 } 6842 x7 := y7.Args[0] 6843 if x7.Op != OpARM64MOVBUload { 6844 break 6845 } 6846 if x7.AuxInt != i-7 { 6847 break 6848 } 6849 if x7.Aux != s { 6850 break 6851 } 6852 if p != x7.Args[0] { 6853 break 6854 } 6855 if mem != x7.Args[1] { 6856 break 6857 } 6858 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 6859 break 6860 } 6861 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 6862 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 6863 v.reset(OpCopy) 6864 v.AddArg(v0) 6865 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 6866 v1.Aux = s 6867 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6868 v2.AuxInt = i - 7 6869 v2.AddArg(p) 6870 v1.AddArg(v2) 6871 v1.AddArg(mem) 6872 v0.AddArg(v1) 6873 return true 6874 } 6875 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i+2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i+3] {s} p mem))) 6876 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6877 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i] p) mem)) 6878 for { 6879 t := v.Type 6880 o0 := v.Args[0] 6881 if o0.Op != OpARM64ORshiftLL { 6882 break 6883 } 6884 if o0.AuxInt != 8 { 6885 break 6886 } 6887 o1 := o0.Args[0] 6888 if o1.Op != OpARM64ORshiftLL { 6889 break 6890 } 6891 if o1.AuxInt != 16 { 6892 break 6893 } 6894 s0 := o1.Args[0] 6895 if s0.Op != OpARM64SLLconst { 6896 break 6897 } 6898 if s0.AuxInt != 24 { 6899 break 6900 } 6901 y0 := s0.Args[0] 6902 if y0.Op != OpARM64MOVDnop { 6903 break 6904 } 6905 x0 := y0.Args[0] 6906 if x0.Op != OpARM64MOVBUload { 6907 break 6908 } 6909 i := x0.AuxInt 6910 s := x0.Aux 6911 p := x0.Args[0] 6912 mem := x0.Args[1] 6913 y1 := o1.Args[1] 6914 if y1.Op != OpARM64MOVDnop { 6915 break 6916 } 6917 x1 := y1.Args[0] 6918 if x1.Op != OpARM64MOVBUload { 6919 break 6920 } 6921 if x1.AuxInt != i+1 { 6922 break 6923 } 6924 if x1.Aux != s { 6925 break 6926 } 6927 if p != x1.Args[0] { 6928 break 6929 } 6930 if mem != x1.Args[1] { 6931 break 6932 } 6933 y2 := o0.Args[1] 6934 if y2.Op != OpARM64MOVDnop { 6935 break 6936 } 6937 x2 := y2.Args[0] 6938 if x2.Op != OpARM64MOVBUload { 6939 break 6940 } 6941 if x2.AuxInt != i+2 { 6942 break 6943 } 6944 if x2.Aux != s { 6945 break 6946 } 6947 if p != x2.Args[0] { 6948 break 6949 } 6950 if mem != x2.Args[1] { 6951 break 6952 } 6953 y3 := v.Args[1] 6954 if y3.Op != OpARM64MOVDnop { 6955 break 6956 } 6957 x3 := y3.Args[0] 6958 if x3.Op != OpARM64MOVBUload { 6959 break 6960 } 6961 if x3.AuxInt != i+3 { 6962 break 6963 } 6964 if x3.Aux != s { 6965 break 6966 } 6967 if p != x3.Args[0] { 6968 break 6969 } 6970 if mem != x3.Args[1] { 6971 break 6972 } 6973 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 6974 break 6975 } 6976 b = mergePoint(b, x0, x1, x2, x3) 6977 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 6978 v.reset(OpCopy) 6979 v.AddArg(v0) 6980 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 6981 v1.Aux = s 6982 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6983 v2.AuxInt = i 6984 v2.AddArg(p) 6985 v1.AddArg(v2) 6986 v1.AddArg(mem) 6987 v0.AddArg(v1) 6988 return true 6989 } 6990 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i+2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i+3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i+4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i+5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i+6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i+7] {s} p mem))) 6991 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 6992 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i] p) mem)) 6993 for { 6994 t := v.Type 6995 o0 := v.Args[0] 6996 if o0.Op != OpARM64ORshiftLL { 6997 break 6998 } 6999 if o0.AuxInt != 8 { 7000 break 7001 } 7002 o1 := o0.Args[0] 7003 if o1.Op != OpARM64ORshiftLL { 7004 break 7005 } 7006 if o1.AuxInt != 16 { 7007 break 7008 } 7009 o2 := o1.Args[0] 7010 if o2.Op != OpARM64ORshiftLL { 7011 break 7012 } 7013 if o2.AuxInt != 24 { 7014 break 7015 } 7016 o3 := o2.Args[0] 7017 if o3.Op != OpARM64ORshiftLL { 7018 break 7019 } 7020 if o3.AuxInt != 32 { 7021 break 7022 } 7023 o4 := o3.Args[0] 7024 if o4.Op != OpARM64ORshiftLL { 7025 break 7026 } 7027 if o4.AuxInt != 40 { 7028 break 7029 } 7030 o5 := o4.Args[0] 7031 if o5.Op != OpARM64ORshiftLL { 7032 break 7033 } 7034 if o5.AuxInt != 48 { 7035 break 7036 } 7037 s0 := o5.Args[0] 7038 if s0.Op != OpARM64SLLconst { 7039 break 7040 } 7041 if s0.AuxInt != 56 { 7042 break 7043 } 7044 y0 := s0.Args[0] 7045 if y0.Op != OpARM64MOVDnop { 7046 break 7047 } 7048 x0 := y0.Args[0] 7049 if x0.Op != OpARM64MOVBUload { 7050 break 7051 } 7052 i := x0.AuxInt 7053 s := x0.Aux 7054 p := x0.Args[0] 7055 mem := x0.Args[1] 7056 y1 := o5.Args[1] 7057 if y1.Op != OpARM64MOVDnop { 7058 break 7059 } 7060 x1 := y1.Args[0] 7061 if x1.Op != OpARM64MOVBUload { 7062 break 7063 } 7064 if x1.AuxInt != i+1 { 7065 break 7066 } 7067 if x1.Aux != s { 7068 break 7069 } 7070 if p != x1.Args[0] { 7071 break 7072 } 7073 if mem != x1.Args[1] { 7074 break 7075 } 7076 y2 := o4.Args[1] 7077 if y2.Op != OpARM64MOVDnop { 7078 break 7079 } 7080 x2 := y2.Args[0] 7081 if x2.Op != OpARM64MOVBUload { 7082 break 7083 } 7084 if x2.AuxInt != i+2 { 7085 break 7086 } 7087 if x2.Aux != s { 7088 break 7089 } 7090 if p != x2.Args[0] { 7091 break 7092 } 7093 if mem != x2.Args[1] { 7094 break 7095 } 7096 y3 := o3.Args[1] 7097 if y3.Op != OpARM64MOVDnop { 7098 break 7099 } 7100 x3 := y3.Args[0] 7101 if x3.Op != OpARM64MOVBUload { 7102 break 7103 } 7104 if x3.AuxInt != i+3 { 7105 break 7106 } 7107 if x3.Aux != s { 7108 break 7109 } 7110 if p != x3.Args[0] { 7111 break 7112 } 7113 if mem != x3.Args[1] { 7114 break 7115 } 7116 y4 := o2.Args[1] 7117 if y4.Op != OpARM64MOVDnop { 7118 break 7119 } 7120 x4 := y4.Args[0] 7121 if x4.Op != OpARM64MOVBUload { 7122 break 7123 } 7124 if x4.AuxInt != i+4 { 7125 break 7126 } 7127 if x4.Aux != s { 7128 break 7129 } 7130 if p != x4.Args[0] { 7131 break 7132 } 7133 if mem != x4.Args[1] { 7134 break 7135 } 7136 y5 := o1.Args[1] 7137 if y5.Op != OpARM64MOVDnop { 7138 break 7139 } 7140 x5 := y5.Args[0] 7141 if x5.Op != OpARM64MOVBUload { 7142 break 7143 } 7144 if x5.AuxInt != i+5 { 7145 break 7146 } 7147 if x5.Aux != s { 7148 break 7149 } 7150 if p != x5.Args[0] { 7151 break 7152 } 7153 if mem != x5.Args[1] { 7154 break 7155 } 7156 y6 := o0.Args[1] 7157 if y6.Op != OpARM64MOVDnop { 7158 break 7159 } 7160 x6 := y6.Args[0] 7161 if x6.Op != OpARM64MOVBUload { 7162 break 7163 } 7164 if x6.AuxInt != i+6 { 7165 break 7166 } 7167 if x6.Aux != s { 7168 break 7169 } 7170 if p != x6.Args[0] { 7171 break 7172 } 7173 if mem != x6.Args[1] { 7174 break 7175 } 7176 y7 := v.Args[1] 7177 if y7.Op != OpARM64MOVDnop { 7178 break 7179 } 7180 x7 := y7.Args[0] 7181 if x7.Op != OpARM64MOVBUload { 7182 break 7183 } 7184 if x7.AuxInt != i+7 { 7185 break 7186 } 7187 if x7.Aux != s { 7188 break 7189 } 7190 if p != x7.Args[0] { 7191 break 7192 } 7193 if mem != x7.Args[1] { 7194 break 7195 } 7196 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7197 break 7198 } 7199 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7200 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7201 v.reset(OpCopy) 7202 v.AddArg(v0) 7203 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7204 v1.Aux = s 7205 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7206 v2.AuxInt = i 7207 v2.AddArg(p) 7208 v1.AddArg(v2) 7209 v1.AddArg(mem) 7210 v0.AddArg(v1) 7211 return true 7212 } 7213 return false 7214 } 7215 func rewriteValueARM64_OpARM64ORconst(v *Value, config *Config) bool { 7216 b := v.Block 7217 _ = b 7218 // match: (ORconst [0] x) 7219 // cond: 7220 // result: x 7221 for { 7222 if v.AuxInt != 0 { 7223 break 7224 } 7225 x := v.Args[0] 7226 v.reset(OpCopy) 7227 v.Type = x.Type 7228 v.AddArg(x) 7229 return true 7230 } 7231 // match: (ORconst [-1] _) 7232 // cond: 7233 // result: (MOVDconst [-1]) 7234 for { 7235 if v.AuxInt != -1 { 7236 break 7237 } 7238 v.reset(OpARM64MOVDconst) 7239 v.AuxInt = -1 7240 return true 7241 } 7242 // match: (ORconst [c] (MOVDconst [d])) 7243 // cond: 7244 // result: (MOVDconst [c|d]) 7245 for { 7246 c := v.AuxInt 7247 v_0 := v.Args[0] 7248 if v_0.Op != OpARM64MOVDconst { 7249 break 7250 } 7251 d := v_0.AuxInt 7252 v.reset(OpARM64MOVDconst) 7253 v.AuxInt = c | d 7254 return true 7255 } 7256 // match: (ORconst [c] (ORconst [d] x)) 7257 // cond: 7258 // result: (ORconst [c|d] x) 7259 for { 7260 c := v.AuxInt 7261 v_0 := v.Args[0] 7262 if v_0.Op != OpARM64ORconst { 7263 break 7264 } 7265 d := v_0.AuxInt 7266 x := v_0.Args[0] 7267 v.reset(OpARM64ORconst) 7268 v.AuxInt = c | d 7269 v.AddArg(x) 7270 return true 7271 } 7272 return false 7273 } 7274 func rewriteValueARM64_OpARM64ORshiftLL(v *Value, config *Config) bool { 7275 b := v.Block 7276 _ = b 7277 // match: (ORshiftLL (MOVDconst [c]) x [d]) 7278 // cond: 7279 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 7280 for { 7281 d := v.AuxInt 7282 v_0 := v.Args[0] 7283 if v_0.Op != OpARM64MOVDconst { 7284 break 7285 } 7286 c := v_0.AuxInt 7287 x := v.Args[1] 7288 v.reset(OpARM64ORconst) 7289 v.AuxInt = c 7290 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7291 v0.AuxInt = d 7292 v0.AddArg(x) 7293 v.AddArg(v0) 7294 return true 7295 } 7296 // match: (ORshiftLL x (MOVDconst [c]) [d]) 7297 // cond: 7298 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 7299 for { 7300 d := v.AuxInt 7301 x := v.Args[0] 7302 v_1 := v.Args[1] 7303 if v_1.Op != OpARM64MOVDconst { 7304 break 7305 } 7306 c := v_1.AuxInt 7307 v.reset(OpARM64ORconst) 7308 v.AuxInt = int64(uint64(c) << uint64(d)) 7309 v.AddArg(x) 7310 return true 7311 } 7312 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 7313 // cond: c==d 7314 // result: y 7315 for { 7316 d := v.AuxInt 7317 x := v.Args[0] 7318 y := v.Args[1] 7319 if y.Op != OpARM64SLLconst { 7320 break 7321 } 7322 c := y.AuxInt 7323 if x != y.Args[0] { 7324 break 7325 } 7326 if !(c == d) { 7327 break 7328 } 7329 v.reset(OpCopy) 7330 v.Type = y.Type 7331 v.AddArg(y) 7332 return true 7333 } 7334 // match: ( ORshiftLL [c] (SRLconst x [64-c]) x) 7335 // cond: 7336 // result: (RORconst [64-c] x) 7337 for { 7338 c := v.AuxInt 7339 v_0 := v.Args[0] 7340 if v_0.Op != OpARM64SRLconst { 7341 break 7342 } 7343 if v_0.AuxInt != 64-c { 7344 break 7345 } 7346 x := v_0.Args[0] 7347 if x != v.Args[1] { 7348 break 7349 } 7350 v.reset(OpARM64RORconst) 7351 v.AuxInt = 64 - c 7352 v.AddArg(x) 7353 return true 7354 } 7355 // match: ( ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 7356 // cond: c < 32 && t.Size() == 4 7357 // result: (RORWconst [32-c] x) 7358 for { 7359 t := v.Type 7360 c := v.AuxInt 7361 v_0 := v.Args[0] 7362 if v_0.Op != OpARM64SRLconst { 7363 break 7364 } 7365 if v_0.AuxInt != 32-c { 7366 break 7367 } 7368 v_0_0 := v_0.Args[0] 7369 if v_0_0.Op != OpARM64MOVWUreg { 7370 break 7371 } 7372 x := v_0_0.Args[0] 7373 if x != v.Args[1] { 7374 break 7375 } 7376 if !(c < 32 && t.Size() == 4) { 7377 break 7378 } 7379 v.reset(OpARM64RORWconst) 7380 v.AuxInt = 32 - c 7381 v.AddArg(x) 7382 return true 7383 } 7384 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem))) 7385 // cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 7386 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i] p) mem) 7387 for { 7388 t := v.Type 7389 if v.AuxInt != 8 { 7390 break 7391 } 7392 y0 := v.Args[0] 7393 if y0.Op != OpARM64MOVDnop { 7394 break 7395 } 7396 x0 := y0.Args[0] 7397 if x0.Op != OpARM64MOVBUload { 7398 break 7399 } 7400 i := x0.AuxInt 7401 s := x0.Aux 7402 p := x0.Args[0] 7403 mem := x0.Args[1] 7404 y1 := v.Args[1] 7405 if y1.Op != OpARM64MOVDnop { 7406 break 7407 } 7408 x1 := y1.Args[0] 7409 if x1.Op != OpARM64MOVBUload { 7410 break 7411 } 7412 if x1.AuxInt != i+1 { 7413 break 7414 } 7415 if x1.Aux != s { 7416 break 7417 } 7418 if p != x1.Args[0] { 7419 break 7420 } 7421 if mem != x1.Args[1] { 7422 break 7423 } 7424 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 7425 break 7426 } 7427 b = mergePoint(b, x0, x1) 7428 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 7429 v.reset(OpCopy) 7430 v.AddArg(v0) 7431 v0.Aux = s 7432 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7433 v1.AuxInt = i 7434 v1.AddArg(p) 7435 v0.AddArg(v1) 7436 v0.AddArg(mem) 7437 return true 7438 } 7439 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i+2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i+3] {s} p mem))) 7440 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 7441 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i] p) mem) 7442 for { 7443 t := v.Type 7444 if v.AuxInt != 24 { 7445 break 7446 } 7447 o0 := v.Args[0] 7448 if o0.Op != OpARM64ORshiftLL { 7449 break 7450 } 7451 if o0.AuxInt != 16 { 7452 break 7453 } 7454 x0 := o0.Args[0] 7455 if x0.Op != OpARM64MOVHUload { 7456 break 7457 } 7458 i := x0.AuxInt 7459 s := x0.Aux 7460 p := x0.Args[0] 7461 mem := x0.Args[1] 7462 y1 := o0.Args[1] 7463 if y1.Op != OpARM64MOVDnop { 7464 break 7465 } 7466 x1 := y1.Args[0] 7467 if x1.Op != OpARM64MOVBUload { 7468 break 7469 } 7470 if x1.AuxInt != i+2 { 7471 break 7472 } 7473 if x1.Aux != s { 7474 break 7475 } 7476 if p != x1.Args[0] { 7477 break 7478 } 7479 if mem != x1.Args[1] { 7480 break 7481 } 7482 y2 := v.Args[1] 7483 if y2.Op != OpARM64MOVDnop { 7484 break 7485 } 7486 x2 := y2.Args[0] 7487 if x2.Op != OpARM64MOVBUload { 7488 break 7489 } 7490 if x2.AuxInt != i+3 { 7491 break 7492 } 7493 if x2.Aux != s { 7494 break 7495 } 7496 if p != x2.Args[0] { 7497 break 7498 } 7499 if mem != x2.Args[1] { 7500 break 7501 } 7502 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 7503 break 7504 } 7505 b = mergePoint(b, x0, x1, x2) 7506 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7507 v.reset(OpCopy) 7508 v.AddArg(v0) 7509 v0.Aux = s 7510 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7511 v1.AuxInt = i 7512 v1.AddArg(p) 7513 v0.AddArg(v1) 7514 v0.AddArg(mem) 7515 return true 7516 } 7517 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i+4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i+5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i+6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i+7] {s} p mem))) 7518 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 7519 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i] p) mem) 7520 for { 7521 t := v.Type 7522 if v.AuxInt != 56 { 7523 break 7524 } 7525 o0 := v.Args[0] 7526 if o0.Op != OpARM64ORshiftLL { 7527 break 7528 } 7529 if o0.AuxInt != 48 { 7530 break 7531 } 7532 o1 := o0.Args[0] 7533 if o1.Op != OpARM64ORshiftLL { 7534 break 7535 } 7536 if o1.AuxInt != 40 { 7537 break 7538 } 7539 o2 := o1.Args[0] 7540 if o2.Op != OpARM64ORshiftLL { 7541 break 7542 } 7543 if o2.AuxInt != 32 { 7544 break 7545 } 7546 x0 := o2.Args[0] 7547 if x0.Op != OpARM64MOVWUload { 7548 break 7549 } 7550 i := x0.AuxInt 7551 s := x0.Aux 7552 p := x0.Args[0] 7553 mem := x0.Args[1] 7554 y1 := o2.Args[1] 7555 if y1.Op != OpARM64MOVDnop { 7556 break 7557 } 7558 x1 := y1.Args[0] 7559 if x1.Op != OpARM64MOVBUload { 7560 break 7561 } 7562 if x1.AuxInt != i+4 { 7563 break 7564 } 7565 if x1.Aux != s { 7566 break 7567 } 7568 if p != x1.Args[0] { 7569 break 7570 } 7571 if mem != x1.Args[1] { 7572 break 7573 } 7574 y2 := o1.Args[1] 7575 if y2.Op != OpARM64MOVDnop { 7576 break 7577 } 7578 x2 := y2.Args[0] 7579 if x2.Op != OpARM64MOVBUload { 7580 break 7581 } 7582 if x2.AuxInt != i+5 { 7583 break 7584 } 7585 if x2.Aux != s { 7586 break 7587 } 7588 if p != x2.Args[0] { 7589 break 7590 } 7591 if mem != x2.Args[1] { 7592 break 7593 } 7594 y3 := o0.Args[1] 7595 if y3.Op != OpARM64MOVDnop { 7596 break 7597 } 7598 x3 := y3.Args[0] 7599 if x3.Op != OpARM64MOVBUload { 7600 break 7601 } 7602 if x3.AuxInt != i+6 { 7603 break 7604 } 7605 if x3.Aux != s { 7606 break 7607 } 7608 if p != x3.Args[0] { 7609 break 7610 } 7611 if mem != x3.Args[1] { 7612 break 7613 } 7614 y4 := v.Args[1] 7615 if y4.Op != OpARM64MOVDnop { 7616 break 7617 } 7618 x4 := y4.Args[0] 7619 if x4.Op != OpARM64MOVBUload { 7620 break 7621 } 7622 if x4.AuxInt != i+7 { 7623 break 7624 } 7625 if x4.Aux != s { 7626 break 7627 } 7628 if p != x4.Args[0] { 7629 break 7630 } 7631 if mem != x4.Args[1] { 7632 break 7633 } 7634 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 7635 break 7636 } 7637 b = mergePoint(b, x0, x1, x2, x3, x4) 7638 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7639 v.reset(OpCopy) 7640 v.AddArg(v0) 7641 v0.Aux = s 7642 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7643 v1.AuxInt = i 7644 v1.AddArg(p) 7645 v0.AddArg(v1) 7646 v0.AddArg(mem) 7647 return true 7648 } 7649 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) 7650 // cond: ((i-1)%2 == 0 || i-1<256 && i-1>-256 && !isArg(s) && !isAuto(s)) && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 7651 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i-1] {s} p mem)) 7652 for { 7653 t := v.Type 7654 if v.AuxInt != 8 { 7655 break 7656 } 7657 y0 := v.Args[0] 7658 if y0.Op != OpARM64MOVDnop { 7659 break 7660 } 7661 x0 := y0.Args[0] 7662 if x0.Op != OpARM64MOVBUload { 7663 break 7664 } 7665 i := x0.AuxInt 7666 s := x0.Aux 7667 p := x0.Args[0] 7668 mem := x0.Args[1] 7669 y1 := v.Args[1] 7670 if y1.Op != OpARM64MOVDnop { 7671 break 7672 } 7673 x1 := y1.Args[0] 7674 if x1.Op != OpARM64MOVBUload { 7675 break 7676 } 7677 if x1.AuxInt != i-1 { 7678 break 7679 } 7680 if x1.Aux != s { 7681 break 7682 } 7683 if p != x1.Args[0] { 7684 break 7685 } 7686 if mem != x1.Args[1] { 7687 break 7688 } 7689 if !(((i-1)%2 == 0 || i-1 < 256 && i-1 > -256 && !isArg(s) && !isAuto(s)) && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 7690 break 7691 } 7692 b = mergePoint(b, x0, x1) 7693 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 7694 v.reset(OpCopy) 7695 v.AddArg(v0) 7696 v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 7697 v1.AuxInt = i - 1 7698 v1.Aux = s 7699 v1.AddArg(p) 7700 v1.AddArg(mem) 7701 v0.AddArg(v1) 7702 return true 7703 } 7704 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) 7705 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 7706 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i-2] p) mem)) 7707 for { 7708 t := v.Type 7709 if v.AuxInt != 24 { 7710 break 7711 } 7712 o0 := v.Args[0] 7713 if o0.Op != OpARM64ORshiftLL { 7714 break 7715 } 7716 if o0.AuxInt != 16 { 7717 break 7718 } 7719 y0 := o0.Args[0] 7720 if y0.Op != OpARM64REV16W { 7721 break 7722 } 7723 x0 := y0.Args[0] 7724 if x0.Op != OpARM64MOVHUload { 7725 break 7726 } 7727 i := x0.AuxInt 7728 s := x0.Aux 7729 p := x0.Args[0] 7730 mem := x0.Args[1] 7731 y1 := o0.Args[1] 7732 if y1.Op != OpARM64MOVDnop { 7733 break 7734 } 7735 x1 := y1.Args[0] 7736 if x1.Op != OpARM64MOVBUload { 7737 break 7738 } 7739 if x1.AuxInt != i-1 { 7740 break 7741 } 7742 if x1.Aux != s { 7743 break 7744 } 7745 if p != x1.Args[0] { 7746 break 7747 } 7748 if mem != x1.Args[1] { 7749 break 7750 } 7751 y2 := v.Args[1] 7752 if y2.Op != OpARM64MOVDnop { 7753 break 7754 } 7755 x2 := y2.Args[0] 7756 if x2.Op != OpARM64MOVBUload { 7757 break 7758 } 7759 if x2.AuxInt != i-2 { 7760 break 7761 } 7762 if x2.Aux != s { 7763 break 7764 } 7765 if p != x2.Args[0] { 7766 break 7767 } 7768 if mem != x2.Args[1] { 7769 break 7770 } 7771 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 7772 break 7773 } 7774 b = mergePoint(b, x0, x1, x2) 7775 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7776 v.reset(OpCopy) 7777 v.AddArg(v0) 7778 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7779 v1.Aux = s 7780 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7781 v2.AuxInt = i - 2 7782 v2.AddArg(p) 7783 v1.AddArg(v2) 7784 v1.AddArg(mem) 7785 v0.AddArg(v1) 7786 return true 7787 } 7788 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i-4] {s} p mem))) 7789 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 7790 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i-4] p) mem)) 7791 for { 7792 t := v.Type 7793 if v.AuxInt != 56 { 7794 break 7795 } 7796 o0 := v.Args[0] 7797 if o0.Op != OpARM64ORshiftLL { 7798 break 7799 } 7800 if o0.AuxInt != 48 { 7801 break 7802 } 7803 o1 := o0.Args[0] 7804 if o1.Op != OpARM64ORshiftLL { 7805 break 7806 } 7807 if o1.AuxInt != 40 { 7808 break 7809 } 7810 o2 := o1.Args[0] 7811 if o2.Op != OpARM64ORshiftLL { 7812 break 7813 } 7814 if o2.AuxInt != 32 { 7815 break 7816 } 7817 y0 := o2.Args[0] 7818 if y0.Op != OpARM64REVW { 7819 break 7820 } 7821 x0 := y0.Args[0] 7822 if x0.Op != OpARM64MOVWUload { 7823 break 7824 } 7825 i := x0.AuxInt 7826 s := x0.Aux 7827 p := x0.Args[0] 7828 mem := x0.Args[1] 7829 y1 := o2.Args[1] 7830 if y1.Op != OpARM64MOVDnop { 7831 break 7832 } 7833 x1 := y1.Args[0] 7834 if x1.Op != OpARM64MOVBUload { 7835 break 7836 } 7837 if x1.AuxInt != i-1 { 7838 break 7839 } 7840 if x1.Aux != s { 7841 break 7842 } 7843 if p != x1.Args[0] { 7844 break 7845 } 7846 if mem != x1.Args[1] { 7847 break 7848 } 7849 y2 := o1.Args[1] 7850 if y2.Op != OpARM64MOVDnop { 7851 break 7852 } 7853 x2 := y2.Args[0] 7854 if x2.Op != OpARM64MOVBUload { 7855 break 7856 } 7857 if x2.AuxInt != i-2 { 7858 break 7859 } 7860 if x2.Aux != s { 7861 break 7862 } 7863 if p != x2.Args[0] { 7864 break 7865 } 7866 if mem != x2.Args[1] { 7867 break 7868 } 7869 y3 := o0.Args[1] 7870 if y3.Op != OpARM64MOVDnop { 7871 break 7872 } 7873 x3 := y3.Args[0] 7874 if x3.Op != OpARM64MOVBUload { 7875 break 7876 } 7877 if x3.AuxInt != i-3 { 7878 break 7879 } 7880 if x3.Aux != s { 7881 break 7882 } 7883 if p != x3.Args[0] { 7884 break 7885 } 7886 if mem != x3.Args[1] { 7887 break 7888 } 7889 y4 := v.Args[1] 7890 if y4.Op != OpARM64MOVDnop { 7891 break 7892 } 7893 x4 := y4.Args[0] 7894 if x4.Op != OpARM64MOVBUload { 7895 break 7896 } 7897 if x4.AuxInt != i-4 { 7898 break 7899 } 7900 if x4.Aux != s { 7901 break 7902 } 7903 if p != x4.Args[0] { 7904 break 7905 } 7906 if mem != x4.Args[1] { 7907 break 7908 } 7909 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 7910 break 7911 } 7912 b = mergePoint(b, x0, x1, x2, x3, x4) 7913 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7914 v.reset(OpCopy) 7915 v.AddArg(v0) 7916 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7917 v1.Aux = s 7918 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7919 v2.AuxInt = i - 4 7920 v2.AddArg(p) 7921 v1.AddArg(v2) 7922 v1.AddArg(mem) 7923 v0.AddArg(v1) 7924 return true 7925 } 7926 return false 7927 } 7928 func rewriteValueARM64_OpARM64ORshiftRA(v *Value, config *Config) bool { 7929 b := v.Block 7930 _ = b 7931 // match: (ORshiftRA (MOVDconst [c]) x [d]) 7932 // cond: 7933 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 7934 for { 7935 d := v.AuxInt 7936 v_0 := v.Args[0] 7937 if v_0.Op != OpARM64MOVDconst { 7938 break 7939 } 7940 c := v_0.AuxInt 7941 x := v.Args[1] 7942 v.reset(OpARM64ORconst) 7943 v.AuxInt = c 7944 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 7945 v0.AuxInt = d 7946 v0.AddArg(x) 7947 v.AddArg(v0) 7948 return true 7949 } 7950 // match: (ORshiftRA x (MOVDconst [c]) [d]) 7951 // cond: 7952 // result: (ORconst x [int64(int64(c)>>uint64(d))]) 7953 for { 7954 d := v.AuxInt 7955 x := v.Args[0] 7956 v_1 := v.Args[1] 7957 if v_1.Op != OpARM64MOVDconst { 7958 break 7959 } 7960 c := v_1.AuxInt 7961 v.reset(OpARM64ORconst) 7962 v.AuxInt = int64(int64(c) >> uint64(d)) 7963 v.AddArg(x) 7964 return true 7965 } 7966 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 7967 // cond: c==d 7968 // result: y 7969 for { 7970 d := v.AuxInt 7971 x := v.Args[0] 7972 y := v.Args[1] 7973 if y.Op != OpARM64SRAconst { 7974 break 7975 } 7976 c := y.AuxInt 7977 if x != y.Args[0] { 7978 break 7979 } 7980 if !(c == d) { 7981 break 7982 } 7983 v.reset(OpCopy) 7984 v.Type = y.Type 7985 v.AddArg(y) 7986 return true 7987 } 7988 return false 7989 } 7990 func rewriteValueARM64_OpARM64ORshiftRL(v *Value, config *Config) bool { 7991 b := v.Block 7992 _ = b 7993 // match: (ORshiftRL (MOVDconst [c]) x [d]) 7994 // cond: 7995 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 7996 for { 7997 d := v.AuxInt 7998 v_0 := v.Args[0] 7999 if v_0.Op != OpARM64MOVDconst { 8000 break 8001 } 8002 c := v_0.AuxInt 8003 x := v.Args[1] 8004 v.reset(OpARM64ORconst) 8005 v.AuxInt = c 8006 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 8007 v0.AuxInt = d 8008 v0.AddArg(x) 8009 v.AddArg(v0) 8010 return true 8011 } 8012 // match: (ORshiftRL x (MOVDconst [c]) [d]) 8013 // cond: 8014 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 8015 for { 8016 d := v.AuxInt 8017 x := v.Args[0] 8018 v_1 := v.Args[1] 8019 if v_1.Op != OpARM64MOVDconst { 8020 break 8021 } 8022 c := v_1.AuxInt 8023 v.reset(OpARM64ORconst) 8024 v.AuxInt = int64(uint64(c) >> uint64(d)) 8025 v.AddArg(x) 8026 return true 8027 } 8028 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 8029 // cond: c==d 8030 // result: y 8031 for { 8032 d := v.AuxInt 8033 x := v.Args[0] 8034 y := v.Args[1] 8035 if y.Op != OpARM64SRLconst { 8036 break 8037 } 8038 c := y.AuxInt 8039 if x != y.Args[0] { 8040 break 8041 } 8042 if !(c == d) { 8043 break 8044 } 8045 v.reset(OpCopy) 8046 v.Type = y.Type 8047 v.AddArg(y) 8048 return true 8049 } 8050 // match: ( ORshiftRL [c] (SLLconst x [64-c]) x) 8051 // cond: 8052 // result: (RORconst [ c] x) 8053 for { 8054 c := v.AuxInt 8055 v_0 := v.Args[0] 8056 if v_0.Op != OpARM64SLLconst { 8057 break 8058 } 8059 if v_0.AuxInt != 64-c { 8060 break 8061 } 8062 x := v_0.Args[0] 8063 if x != v.Args[1] { 8064 break 8065 } 8066 v.reset(OpARM64RORconst) 8067 v.AuxInt = c 8068 v.AddArg(x) 8069 return true 8070 } 8071 // match: ( ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 8072 // cond: c < 32 && t.Size() == 4 8073 // result: (RORWconst [ c] x) 8074 for { 8075 t := v.Type 8076 c := v.AuxInt 8077 v_0 := v.Args[0] 8078 if v_0.Op != OpARM64SLLconst { 8079 break 8080 } 8081 if v_0.AuxInt != 32-c { 8082 break 8083 } 8084 x := v_0.Args[0] 8085 v_1 := v.Args[1] 8086 if v_1.Op != OpARM64MOVWUreg { 8087 break 8088 } 8089 if x != v_1.Args[0] { 8090 break 8091 } 8092 if !(c < 32 && t.Size() == 4) { 8093 break 8094 } 8095 v.reset(OpARM64RORWconst) 8096 v.AuxInt = c 8097 v.AddArg(x) 8098 return true 8099 } 8100 return false 8101 } 8102 func rewriteValueARM64_OpARM64SLL(v *Value, config *Config) bool { 8103 b := v.Block 8104 _ = b 8105 // match: (SLL x (MOVDconst [c])) 8106 // cond: 8107 // result: (SLLconst x [c&63]) 8108 for { 8109 x := v.Args[0] 8110 v_1 := v.Args[1] 8111 if v_1.Op != OpARM64MOVDconst { 8112 break 8113 } 8114 c := v_1.AuxInt 8115 v.reset(OpARM64SLLconst) 8116 v.AuxInt = c & 63 8117 v.AddArg(x) 8118 return true 8119 } 8120 return false 8121 } 8122 func rewriteValueARM64_OpARM64SLLconst(v *Value, config *Config) bool { 8123 b := v.Block 8124 _ = b 8125 // match: (SLLconst [c] (MOVDconst [d])) 8126 // cond: 8127 // result: (MOVDconst [int64(d)<<uint64(c)]) 8128 for { 8129 c := v.AuxInt 8130 v_0 := v.Args[0] 8131 if v_0.Op != OpARM64MOVDconst { 8132 break 8133 } 8134 d := v_0.AuxInt 8135 v.reset(OpARM64MOVDconst) 8136 v.AuxInt = int64(d) << uint64(c) 8137 return true 8138 } 8139 return false 8140 } 8141 func rewriteValueARM64_OpARM64SRA(v *Value, config *Config) bool { 8142 b := v.Block 8143 _ = b 8144 // match: (SRA x (MOVDconst [c])) 8145 // cond: 8146 // result: (SRAconst x [c&63]) 8147 for { 8148 x := v.Args[0] 8149 v_1 := v.Args[1] 8150 if v_1.Op != OpARM64MOVDconst { 8151 break 8152 } 8153 c := v_1.AuxInt 8154 v.reset(OpARM64SRAconst) 8155 v.AuxInt = c & 63 8156 v.AddArg(x) 8157 return true 8158 } 8159 return false 8160 } 8161 func rewriteValueARM64_OpARM64SRAconst(v *Value, config *Config) bool { 8162 b := v.Block 8163 _ = b 8164 // match: (SRAconst [c] (MOVDconst [d])) 8165 // cond: 8166 // result: (MOVDconst [int64(d)>>uint64(c)]) 8167 for { 8168 c := v.AuxInt 8169 v_0 := v.Args[0] 8170 if v_0.Op != OpARM64MOVDconst { 8171 break 8172 } 8173 d := v_0.AuxInt 8174 v.reset(OpARM64MOVDconst) 8175 v.AuxInt = int64(d) >> uint64(c) 8176 return true 8177 } 8178 return false 8179 } 8180 func rewriteValueARM64_OpARM64SRL(v *Value, config *Config) bool { 8181 b := v.Block 8182 _ = b 8183 // match: (SRL x (MOVDconst [c])) 8184 // cond: 8185 // result: (SRLconst x [c&63]) 8186 for { 8187 x := v.Args[0] 8188 v_1 := v.Args[1] 8189 if v_1.Op != OpARM64MOVDconst { 8190 break 8191 } 8192 c := v_1.AuxInt 8193 v.reset(OpARM64SRLconst) 8194 v.AuxInt = c & 63 8195 v.AddArg(x) 8196 return true 8197 } 8198 return false 8199 } 8200 func rewriteValueARM64_OpARM64SRLconst(v *Value, config *Config) bool { 8201 b := v.Block 8202 _ = b 8203 // match: (SRLconst [c] (MOVDconst [d])) 8204 // cond: 8205 // result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 8206 for { 8207 c := v.AuxInt 8208 v_0 := v.Args[0] 8209 if v_0.Op != OpARM64MOVDconst { 8210 break 8211 } 8212 d := v_0.AuxInt 8213 v.reset(OpARM64MOVDconst) 8214 v.AuxInt = int64(uint64(d) >> uint64(c)) 8215 return true 8216 } 8217 return false 8218 } 8219 func rewriteValueARM64_OpARM64SUB(v *Value, config *Config) bool { 8220 b := v.Block 8221 _ = b 8222 // match: (SUB x (MOVDconst [c])) 8223 // cond: 8224 // result: (SUBconst [c] x) 8225 for { 8226 x := v.Args[0] 8227 v_1 := v.Args[1] 8228 if v_1.Op != OpARM64MOVDconst { 8229 break 8230 } 8231 c := v_1.AuxInt 8232 v.reset(OpARM64SUBconst) 8233 v.AuxInt = c 8234 v.AddArg(x) 8235 return true 8236 } 8237 // match: (SUB x x) 8238 // cond: 8239 // result: (MOVDconst [0]) 8240 for { 8241 x := v.Args[0] 8242 if x != v.Args[1] { 8243 break 8244 } 8245 v.reset(OpARM64MOVDconst) 8246 v.AuxInt = 0 8247 return true 8248 } 8249 // match: (SUB x (SLLconst [c] y)) 8250 // cond: 8251 // result: (SUBshiftLL x y [c]) 8252 for { 8253 x := v.Args[0] 8254 v_1 := v.Args[1] 8255 if v_1.Op != OpARM64SLLconst { 8256 break 8257 } 8258 c := v_1.AuxInt 8259 y := v_1.Args[0] 8260 v.reset(OpARM64SUBshiftLL) 8261 v.AuxInt = c 8262 v.AddArg(x) 8263 v.AddArg(y) 8264 return true 8265 } 8266 // match: (SUB x (SRLconst [c] y)) 8267 // cond: 8268 // result: (SUBshiftRL x y [c]) 8269 for { 8270 x := v.Args[0] 8271 v_1 := v.Args[1] 8272 if v_1.Op != OpARM64SRLconst { 8273 break 8274 } 8275 c := v_1.AuxInt 8276 y := v_1.Args[0] 8277 v.reset(OpARM64SUBshiftRL) 8278 v.AuxInt = c 8279 v.AddArg(x) 8280 v.AddArg(y) 8281 return true 8282 } 8283 // match: (SUB x (SRAconst [c] y)) 8284 // cond: 8285 // result: (SUBshiftRA x y [c]) 8286 for { 8287 x := v.Args[0] 8288 v_1 := v.Args[1] 8289 if v_1.Op != OpARM64SRAconst { 8290 break 8291 } 8292 c := v_1.AuxInt 8293 y := v_1.Args[0] 8294 v.reset(OpARM64SUBshiftRA) 8295 v.AuxInt = c 8296 v.AddArg(x) 8297 v.AddArg(y) 8298 return true 8299 } 8300 return false 8301 } 8302 func rewriteValueARM64_OpARM64SUBconst(v *Value, config *Config) bool { 8303 b := v.Block 8304 _ = b 8305 // match: (SUBconst [0] x) 8306 // cond: 8307 // result: x 8308 for { 8309 if v.AuxInt != 0 { 8310 break 8311 } 8312 x := v.Args[0] 8313 v.reset(OpCopy) 8314 v.Type = x.Type 8315 v.AddArg(x) 8316 return true 8317 } 8318 // match: (SUBconst [c] (MOVDconst [d])) 8319 // cond: 8320 // result: (MOVDconst [d-c]) 8321 for { 8322 c := v.AuxInt 8323 v_0 := v.Args[0] 8324 if v_0.Op != OpARM64MOVDconst { 8325 break 8326 } 8327 d := v_0.AuxInt 8328 v.reset(OpARM64MOVDconst) 8329 v.AuxInt = d - c 8330 return true 8331 } 8332 // match: (SUBconst [c] (SUBconst [d] x)) 8333 // cond: 8334 // result: (ADDconst [-c-d] x) 8335 for { 8336 c := v.AuxInt 8337 v_0 := v.Args[0] 8338 if v_0.Op != OpARM64SUBconst { 8339 break 8340 } 8341 d := v_0.AuxInt 8342 x := v_0.Args[0] 8343 v.reset(OpARM64ADDconst) 8344 v.AuxInt = -c - d 8345 v.AddArg(x) 8346 return true 8347 } 8348 // match: (SUBconst [c] (ADDconst [d] x)) 8349 // cond: 8350 // result: (ADDconst [-c+d] x) 8351 for { 8352 c := v.AuxInt 8353 v_0 := v.Args[0] 8354 if v_0.Op != OpARM64ADDconst { 8355 break 8356 } 8357 d := v_0.AuxInt 8358 x := v_0.Args[0] 8359 v.reset(OpARM64ADDconst) 8360 v.AuxInt = -c + d 8361 v.AddArg(x) 8362 return true 8363 } 8364 return false 8365 } 8366 func rewriteValueARM64_OpARM64SUBshiftLL(v *Value, config *Config) bool { 8367 b := v.Block 8368 _ = b 8369 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 8370 // cond: 8371 // result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 8372 for { 8373 d := v.AuxInt 8374 x := v.Args[0] 8375 v_1 := v.Args[1] 8376 if v_1.Op != OpARM64MOVDconst { 8377 break 8378 } 8379 c := v_1.AuxInt 8380 v.reset(OpARM64SUBconst) 8381 v.AuxInt = int64(uint64(c) << uint64(d)) 8382 v.AddArg(x) 8383 return true 8384 } 8385 // match: (SUBshiftLL x (SLLconst x [c]) [d]) 8386 // cond: c==d 8387 // result: (MOVDconst [0]) 8388 for { 8389 d := v.AuxInt 8390 x := v.Args[0] 8391 v_1 := v.Args[1] 8392 if v_1.Op != OpARM64SLLconst { 8393 break 8394 } 8395 c := v_1.AuxInt 8396 if x != v_1.Args[0] { 8397 break 8398 } 8399 if !(c == d) { 8400 break 8401 } 8402 v.reset(OpARM64MOVDconst) 8403 v.AuxInt = 0 8404 return true 8405 } 8406 return false 8407 } 8408 func rewriteValueARM64_OpARM64SUBshiftRA(v *Value, config *Config) bool { 8409 b := v.Block 8410 _ = b 8411 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 8412 // cond: 8413 // result: (SUBconst x [int64(int64(c)>>uint64(d))]) 8414 for { 8415 d := v.AuxInt 8416 x := v.Args[0] 8417 v_1 := v.Args[1] 8418 if v_1.Op != OpARM64MOVDconst { 8419 break 8420 } 8421 c := v_1.AuxInt 8422 v.reset(OpARM64SUBconst) 8423 v.AuxInt = int64(int64(c) >> uint64(d)) 8424 v.AddArg(x) 8425 return true 8426 } 8427 // match: (SUBshiftRA x (SRAconst x [c]) [d]) 8428 // cond: c==d 8429 // result: (MOVDconst [0]) 8430 for { 8431 d := v.AuxInt 8432 x := v.Args[0] 8433 v_1 := v.Args[1] 8434 if v_1.Op != OpARM64SRAconst { 8435 break 8436 } 8437 c := v_1.AuxInt 8438 if x != v_1.Args[0] { 8439 break 8440 } 8441 if !(c == d) { 8442 break 8443 } 8444 v.reset(OpARM64MOVDconst) 8445 v.AuxInt = 0 8446 return true 8447 } 8448 return false 8449 } 8450 func rewriteValueARM64_OpARM64SUBshiftRL(v *Value, config *Config) bool { 8451 b := v.Block 8452 _ = b 8453 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 8454 // cond: 8455 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 8456 for { 8457 d := v.AuxInt 8458 x := v.Args[0] 8459 v_1 := v.Args[1] 8460 if v_1.Op != OpARM64MOVDconst { 8461 break 8462 } 8463 c := v_1.AuxInt 8464 v.reset(OpARM64SUBconst) 8465 v.AuxInt = int64(uint64(c) >> uint64(d)) 8466 v.AddArg(x) 8467 return true 8468 } 8469 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 8470 // cond: c==d 8471 // result: (MOVDconst [0]) 8472 for { 8473 d := v.AuxInt 8474 x := v.Args[0] 8475 v_1 := v.Args[1] 8476 if v_1.Op != OpARM64SRLconst { 8477 break 8478 } 8479 c := v_1.AuxInt 8480 if x != v_1.Args[0] { 8481 break 8482 } 8483 if !(c == d) { 8484 break 8485 } 8486 v.reset(OpARM64MOVDconst) 8487 v.AuxInt = 0 8488 return true 8489 } 8490 return false 8491 } 8492 func rewriteValueARM64_OpARM64UDIV(v *Value, config *Config) bool { 8493 b := v.Block 8494 _ = b 8495 // match: (UDIV x (MOVDconst [1])) 8496 // cond: 8497 // result: x 8498 for { 8499 x := v.Args[0] 8500 v_1 := v.Args[1] 8501 if v_1.Op != OpARM64MOVDconst { 8502 break 8503 } 8504 if v_1.AuxInt != 1 { 8505 break 8506 } 8507 v.reset(OpCopy) 8508 v.Type = x.Type 8509 v.AddArg(x) 8510 return true 8511 } 8512 // match: (UDIV x (MOVDconst [c])) 8513 // cond: isPowerOfTwo(c) 8514 // result: (SRLconst [log2(c)] x) 8515 for { 8516 x := v.Args[0] 8517 v_1 := v.Args[1] 8518 if v_1.Op != OpARM64MOVDconst { 8519 break 8520 } 8521 c := v_1.AuxInt 8522 if !(isPowerOfTwo(c)) { 8523 break 8524 } 8525 v.reset(OpARM64SRLconst) 8526 v.AuxInt = log2(c) 8527 v.AddArg(x) 8528 return true 8529 } 8530 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 8531 // cond: 8532 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 8533 for { 8534 v_0 := v.Args[0] 8535 if v_0.Op != OpARM64MOVDconst { 8536 break 8537 } 8538 c := v_0.AuxInt 8539 v_1 := v.Args[1] 8540 if v_1.Op != OpARM64MOVDconst { 8541 break 8542 } 8543 d := v_1.AuxInt 8544 v.reset(OpARM64MOVDconst) 8545 v.AuxInt = int64(uint64(c) / uint64(d)) 8546 return true 8547 } 8548 return false 8549 } 8550 func rewriteValueARM64_OpARM64UDIVW(v *Value, config *Config) bool { 8551 b := v.Block 8552 _ = b 8553 // match: (UDIVW x (MOVDconst [c])) 8554 // cond: uint32(c)==1 8555 // result: x 8556 for { 8557 x := v.Args[0] 8558 v_1 := v.Args[1] 8559 if v_1.Op != OpARM64MOVDconst { 8560 break 8561 } 8562 c := v_1.AuxInt 8563 if !(uint32(c) == 1) { 8564 break 8565 } 8566 v.reset(OpCopy) 8567 v.Type = x.Type 8568 v.AddArg(x) 8569 return true 8570 } 8571 // match: (UDIVW x (MOVDconst [c])) 8572 // cond: isPowerOfTwo(c) && is32Bit(c) 8573 // result: (SRLconst [log2(c)] x) 8574 for { 8575 x := v.Args[0] 8576 v_1 := v.Args[1] 8577 if v_1.Op != OpARM64MOVDconst { 8578 break 8579 } 8580 c := v_1.AuxInt 8581 if !(isPowerOfTwo(c) && is32Bit(c)) { 8582 break 8583 } 8584 v.reset(OpARM64SRLconst) 8585 v.AuxInt = log2(c) 8586 v.AddArg(x) 8587 return true 8588 } 8589 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 8590 // cond: 8591 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 8592 for { 8593 v_0 := v.Args[0] 8594 if v_0.Op != OpARM64MOVDconst { 8595 break 8596 } 8597 c := v_0.AuxInt 8598 v_1 := v.Args[1] 8599 if v_1.Op != OpARM64MOVDconst { 8600 break 8601 } 8602 d := v_1.AuxInt 8603 v.reset(OpARM64MOVDconst) 8604 v.AuxInt = int64(uint32(c) / uint32(d)) 8605 return true 8606 } 8607 return false 8608 } 8609 func rewriteValueARM64_OpARM64UMOD(v *Value, config *Config) bool { 8610 b := v.Block 8611 _ = b 8612 // match: (UMOD _ (MOVDconst [1])) 8613 // cond: 8614 // result: (MOVDconst [0]) 8615 for { 8616 v_1 := v.Args[1] 8617 if v_1.Op != OpARM64MOVDconst { 8618 break 8619 } 8620 if v_1.AuxInt != 1 { 8621 break 8622 } 8623 v.reset(OpARM64MOVDconst) 8624 v.AuxInt = 0 8625 return true 8626 } 8627 // match: (UMOD x (MOVDconst [c])) 8628 // cond: isPowerOfTwo(c) 8629 // result: (ANDconst [c-1] x) 8630 for { 8631 x := v.Args[0] 8632 v_1 := v.Args[1] 8633 if v_1.Op != OpARM64MOVDconst { 8634 break 8635 } 8636 c := v_1.AuxInt 8637 if !(isPowerOfTwo(c)) { 8638 break 8639 } 8640 v.reset(OpARM64ANDconst) 8641 v.AuxInt = c - 1 8642 v.AddArg(x) 8643 return true 8644 } 8645 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 8646 // cond: 8647 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 8648 for { 8649 v_0 := v.Args[0] 8650 if v_0.Op != OpARM64MOVDconst { 8651 break 8652 } 8653 c := v_0.AuxInt 8654 v_1 := v.Args[1] 8655 if v_1.Op != OpARM64MOVDconst { 8656 break 8657 } 8658 d := v_1.AuxInt 8659 v.reset(OpARM64MOVDconst) 8660 v.AuxInt = int64(uint64(c) % uint64(d)) 8661 return true 8662 } 8663 return false 8664 } 8665 func rewriteValueARM64_OpARM64UMODW(v *Value, config *Config) bool { 8666 b := v.Block 8667 _ = b 8668 // match: (UMODW _ (MOVDconst [c])) 8669 // cond: uint32(c)==1 8670 // result: (MOVDconst [0]) 8671 for { 8672 v_1 := v.Args[1] 8673 if v_1.Op != OpARM64MOVDconst { 8674 break 8675 } 8676 c := v_1.AuxInt 8677 if !(uint32(c) == 1) { 8678 break 8679 } 8680 v.reset(OpARM64MOVDconst) 8681 v.AuxInt = 0 8682 return true 8683 } 8684 // match: (UMODW x (MOVDconst [c])) 8685 // cond: isPowerOfTwo(c) && is32Bit(c) 8686 // result: (ANDconst [c-1] x) 8687 for { 8688 x := v.Args[0] 8689 v_1 := v.Args[1] 8690 if v_1.Op != OpARM64MOVDconst { 8691 break 8692 } 8693 c := v_1.AuxInt 8694 if !(isPowerOfTwo(c) && is32Bit(c)) { 8695 break 8696 } 8697 v.reset(OpARM64ANDconst) 8698 v.AuxInt = c - 1 8699 v.AddArg(x) 8700 return true 8701 } 8702 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 8703 // cond: 8704 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 8705 for { 8706 v_0 := v.Args[0] 8707 if v_0.Op != OpARM64MOVDconst { 8708 break 8709 } 8710 c := v_0.AuxInt 8711 v_1 := v.Args[1] 8712 if v_1.Op != OpARM64MOVDconst { 8713 break 8714 } 8715 d := v_1.AuxInt 8716 v.reset(OpARM64MOVDconst) 8717 v.AuxInt = int64(uint32(c) % uint32(d)) 8718 return true 8719 } 8720 return false 8721 } 8722 func rewriteValueARM64_OpARM64XOR(v *Value, config *Config) bool { 8723 b := v.Block 8724 _ = b 8725 // match: (XOR (MOVDconst [c]) x) 8726 // cond: 8727 // result: (XORconst [c] x) 8728 for { 8729 v_0 := v.Args[0] 8730 if v_0.Op != OpARM64MOVDconst { 8731 break 8732 } 8733 c := v_0.AuxInt 8734 x := v.Args[1] 8735 v.reset(OpARM64XORconst) 8736 v.AuxInt = c 8737 v.AddArg(x) 8738 return true 8739 } 8740 // match: (XOR x (MOVDconst [c])) 8741 // cond: 8742 // result: (XORconst [c] x) 8743 for { 8744 x := v.Args[0] 8745 v_1 := v.Args[1] 8746 if v_1.Op != OpARM64MOVDconst { 8747 break 8748 } 8749 c := v_1.AuxInt 8750 v.reset(OpARM64XORconst) 8751 v.AuxInt = c 8752 v.AddArg(x) 8753 return true 8754 } 8755 // match: (XOR x x) 8756 // cond: 8757 // result: (MOVDconst [0]) 8758 for { 8759 x := v.Args[0] 8760 if x != v.Args[1] { 8761 break 8762 } 8763 v.reset(OpARM64MOVDconst) 8764 v.AuxInt = 0 8765 return true 8766 } 8767 // match: (XOR x (SLLconst [c] y)) 8768 // cond: 8769 // result: (XORshiftLL x y [c]) 8770 for { 8771 x := v.Args[0] 8772 v_1 := v.Args[1] 8773 if v_1.Op != OpARM64SLLconst { 8774 break 8775 } 8776 c := v_1.AuxInt 8777 y := v_1.Args[0] 8778 v.reset(OpARM64XORshiftLL) 8779 v.AuxInt = c 8780 v.AddArg(x) 8781 v.AddArg(y) 8782 return true 8783 } 8784 // match: (XOR (SLLconst [c] y) x) 8785 // cond: 8786 // result: (XORshiftLL x y [c]) 8787 for { 8788 v_0 := v.Args[0] 8789 if v_0.Op != OpARM64SLLconst { 8790 break 8791 } 8792 c := v_0.AuxInt 8793 y := v_0.Args[0] 8794 x := v.Args[1] 8795 v.reset(OpARM64XORshiftLL) 8796 v.AuxInt = c 8797 v.AddArg(x) 8798 v.AddArg(y) 8799 return true 8800 } 8801 // match: (XOR x (SRLconst [c] y)) 8802 // cond: 8803 // result: (XORshiftRL x y [c]) 8804 for { 8805 x := v.Args[0] 8806 v_1 := v.Args[1] 8807 if v_1.Op != OpARM64SRLconst { 8808 break 8809 } 8810 c := v_1.AuxInt 8811 y := v_1.Args[0] 8812 v.reset(OpARM64XORshiftRL) 8813 v.AuxInt = c 8814 v.AddArg(x) 8815 v.AddArg(y) 8816 return true 8817 } 8818 // match: (XOR (SRLconst [c] y) x) 8819 // cond: 8820 // result: (XORshiftRL x y [c]) 8821 for { 8822 v_0 := v.Args[0] 8823 if v_0.Op != OpARM64SRLconst { 8824 break 8825 } 8826 c := v_0.AuxInt 8827 y := v_0.Args[0] 8828 x := v.Args[1] 8829 v.reset(OpARM64XORshiftRL) 8830 v.AuxInt = c 8831 v.AddArg(x) 8832 v.AddArg(y) 8833 return true 8834 } 8835 // match: (XOR x (SRAconst [c] y)) 8836 // cond: 8837 // result: (XORshiftRA x y [c]) 8838 for { 8839 x := v.Args[0] 8840 v_1 := v.Args[1] 8841 if v_1.Op != OpARM64SRAconst { 8842 break 8843 } 8844 c := v_1.AuxInt 8845 y := v_1.Args[0] 8846 v.reset(OpARM64XORshiftRA) 8847 v.AuxInt = c 8848 v.AddArg(x) 8849 v.AddArg(y) 8850 return true 8851 } 8852 // match: (XOR (SRAconst [c] y) x) 8853 // cond: 8854 // result: (XORshiftRA x y [c]) 8855 for { 8856 v_0 := v.Args[0] 8857 if v_0.Op != OpARM64SRAconst { 8858 break 8859 } 8860 c := v_0.AuxInt 8861 y := v_0.Args[0] 8862 x := v.Args[1] 8863 v.reset(OpARM64XORshiftRA) 8864 v.AuxInt = c 8865 v.AddArg(x) 8866 v.AddArg(y) 8867 return true 8868 } 8869 return false 8870 } 8871 func rewriteValueARM64_OpARM64XORconst(v *Value, config *Config) bool { 8872 b := v.Block 8873 _ = b 8874 // match: (XORconst [0] x) 8875 // cond: 8876 // result: x 8877 for { 8878 if v.AuxInt != 0 { 8879 break 8880 } 8881 x := v.Args[0] 8882 v.reset(OpCopy) 8883 v.Type = x.Type 8884 v.AddArg(x) 8885 return true 8886 } 8887 // match: (XORconst [-1] x) 8888 // cond: 8889 // result: (MVN x) 8890 for { 8891 if v.AuxInt != -1 { 8892 break 8893 } 8894 x := v.Args[0] 8895 v.reset(OpARM64MVN) 8896 v.AddArg(x) 8897 return true 8898 } 8899 // match: (XORconst [c] (MOVDconst [d])) 8900 // cond: 8901 // result: (MOVDconst [c^d]) 8902 for { 8903 c := v.AuxInt 8904 v_0 := v.Args[0] 8905 if v_0.Op != OpARM64MOVDconst { 8906 break 8907 } 8908 d := v_0.AuxInt 8909 v.reset(OpARM64MOVDconst) 8910 v.AuxInt = c ^ d 8911 return true 8912 } 8913 // match: (XORconst [c] (XORconst [d] x)) 8914 // cond: 8915 // result: (XORconst [c^d] x) 8916 for { 8917 c := v.AuxInt 8918 v_0 := v.Args[0] 8919 if v_0.Op != OpARM64XORconst { 8920 break 8921 } 8922 d := v_0.AuxInt 8923 x := v_0.Args[0] 8924 v.reset(OpARM64XORconst) 8925 v.AuxInt = c ^ d 8926 v.AddArg(x) 8927 return true 8928 } 8929 return false 8930 } 8931 func rewriteValueARM64_OpARM64XORshiftLL(v *Value, config *Config) bool { 8932 b := v.Block 8933 _ = b 8934 // match: (XORshiftLL (MOVDconst [c]) x [d]) 8935 // cond: 8936 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 8937 for { 8938 d := v.AuxInt 8939 v_0 := v.Args[0] 8940 if v_0.Op != OpARM64MOVDconst { 8941 break 8942 } 8943 c := v_0.AuxInt 8944 x := v.Args[1] 8945 v.reset(OpARM64XORconst) 8946 v.AuxInt = c 8947 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8948 v0.AuxInt = d 8949 v0.AddArg(x) 8950 v.AddArg(v0) 8951 return true 8952 } 8953 // match: (XORshiftLL x (MOVDconst [c]) [d]) 8954 // cond: 8955 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 8956 for { 8957 d := v.AuxInt 8958 x := v.Args[0] 8959 v_1 := v.Args[1] 8960 if v_1.Op != OpARM64MOVDconst { 8961 break 8962 } 8963 c := v_1.AuxInt 8964 v.reset(OpARM64XORconst) 8965 v.AuxInt = int64(uint64(c) << uint64(d)) 8966 v.AddArg(x) 8967 return true 8968 } 8969 // match: (XORshiftLL x (SLLconst x [c]) [d]) 8970 // cond: c==d 8971 // result: (MOVDconst [0]) 8972 for { 8973 d := v.AuxInt 8974 x := v.Args[0] 8975 v_1 := v.Args[1] 8976 if v_1.Op != OpARM64SLLconst { 8977 break 8978 } 8979 c := v_1.AuxInt 8980 if x != v_1.Args[0] { 8981 break 8982 } 8983 if !(c == d) { 8984 break 8985 } 8986 v.reset(OpARM64MOVDconst) 8987 v.AuxInt = 0 8988 return true 8989 } 8990 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 8991 // cond: 8992 // result: (RORconst [64-c] x) 8993 for { 8994 c := v.AuxInt 8995 v_0 := v.Args[0] 8996 if v_0.Op != OpARM64SRLconst { 8997 break 8998 } 8999 if v_0.AuxInt != 64-c { 9000 break 9001 } 9002 x := v_0.Args[0] 9003 if x != v.Args[1] { 9004 break 9005 } 9006 v.reset(OpARM64RORconst) 9007 v.AuxInt = 64 - c 9008 v.AddArg(x) 9009 return true 9010 } 9011 // match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 9012 // cond: c < 32 && t.Size() == 4 9013 // result: (RORWconst [32-c] x) 9014 for { 9015 t := v.Type 9016 c := v.AuxInt 9017 v_0 := v.Args[0] 9018 if v_0.Op != OpARM64SRLconst { 9019 break 9020 } 9021 if v_0.AuxInt != 32-c { 9022 break 9023 } 9024 v_0_0 := v_0.Args[0] 9025 if v_0_0.Op != OpARM64MOVWUreg { 9026 break 9027 } 9028 x := v_0_0.Args[0] 9029 if x != v.Args[1] { 9030 break 9031 } 9032 if !(c < 32 && t.Size() == 4) { 9033 break 9034 } 9035 v.reset(OpARM64RORWconst) 9036 v.AuxInt = 32 - c 9037 v.AddArg(x) 9038 return true 9039 } 9040 return false 9041 } 9042 func rewriteValueARM64_OpARM64XORshiftRA(v *Value, config *Config) bool { 9043 b := v.Block 9044 _ = b 9045 // match: (XORshiftRA (MOVDconst [c]) x [d]) 9046 // cond: 9047 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 9048 for { 9049 d := v.AuxInt 9050 v_0 := v.Args[0] 9051 if v_0.Op != OpARM64MOVDconst { 9052 break 9053 } 9054 c := v_0.AuxInt 9055 x := v.Args[1] 9056 v.reset(OpARM64XORconst) 9057 v.AuxInt = c 9058 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 9059 v0.AuxInt = d 9060 v0.AddArg(x) 9061 v.AddArg(v0) 9062 return true 9063 } 9064 // match: (XORshiftRA x (MOVDconst [c]) [d]) 9065 // cond: 9066 // result: (XORconst x [int64(int64(c)>>uint64(d))]) 9067 for { 9068 d := v.AuxInt 9069 x := v.Args[0] 9070 v_1 := v.Args[1] 9071 if v_1.Op != OpARM64MOVDconst { 9072 break 9073 } 9074 c := v_1.AuxInt 9075 v.reset(OpARM64XORconst) 9076 v.AuxInt = int64(int64(c) >> uint64(d)) 9077 v.AddArg(x) 9078 return true 9079 } 9080 // match: (XORshiftRA x (SRAconst x [c]) [d]) 9081 // cond: c==d 9082 // result: (MOVDconst [0]) 9083 for { 9084 d := v.AuxInt 9085 x := v.Args[0] 9086 v_1 := v.Args[1] 9087 if v_1.Op != OpARM64SRAconst { 9088 break 9089 } 9090 c := v_1.AuxInt 9091 if x != v_1.Args[0] { 9092 break 9093 } 9094 if !(c == d) { 9095 break 9096 } 9097 v.reset(OpARM64MOVDconst) 9098 v.AuxInt = 0 9099 return true 9100 } 9101 return false 9102 } 9103 func rewriteValueARM64_OpARM64XORshiftRL(v *Value, config *Config) bool { 9104 b := v.Block 9105 _ = b 9106 // match: (XORshiftRL (MOVDconst [c]) x [d]) 9107 // cond: 9108 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 9109 for { 9110 d := v.AuxInt 9111 v_0 := v.Args[0] 9112 if v_0.Op != OpARM64MOVDconst { 9113 break 9114 } 9115 c := v_0.AuxInt 9116 x := v.Args[1] 9117 v.reset(OpARM64XORconst) 9118 v.AuxInt = c 9119 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 9120 v0.AuxInt = d 9121 v0.AddArg(x) 9122 v.AddArg(v0) 9123 return true 9124 } 9125 // match: (XORshiftRL x (MOVDconst [c]) [d]) 9126 // cond: 9127 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 9128 for { 9129 d := v.AuxInt 9130 x := v.Args[0] 9131 v_1 := v.Args[1] 9132 if v_1.Op != OpARM64MOVDconst { 9133 break 9134 } 9135 c := v_1.AuxInt 9136 v.reset(OpARM64XORconst) 9137 v.AuxInt = int64(uint64(c) >> uint64(d)) 9138 v.AddArg(x) 9139 return true 9140 } 9141 // match: (XORshiftRL x (SRLconst x [c]) [d]) 9142 // cond: c==d 9143 // result: (MOVDconst [0]) 9144 for { 9145 d := v.AuxInt 9146 x := v.Args[0] 9147 v_1 := v.Args[1] 9148 if v_1.Op != OpARM64SRLconst { 9149 break 9150 } 9151 c := v_1.AuxInt 9152 if x != v_1.Args[0] { 9153 break 9154 } 9155 if !(c == d) { 9156 break 9157 } 9158 v.reset(OpARM64MOVDconst) 9159 v.AuxInt = 0 9160 return true 9161 } 9162 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 9163 // cond: 9164 // result: (RORconst [ c] x) 9165 for { 9166 c := v.AuxInt 9167 v_0 := v.Args[0] 9168 if v_0.Op != OpARM64SLLconst { 9169 break 9170 } 9171 if v_0.AuxInt != 64-c { 9172 break 9173 } 9174 x := v_0.Args[0] 9175 if x != v.Args[1] { 9176 break 9177 } 9178 v.reset(OpARM64RORconst) 9179 v.AuxInt = c 9180 v.AddArg(x) 9181 return true 9182 } 9183 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 9184 // cond: c < 32 && t.Size() == 4 9185 // result: (RORWconst [ c] x) 9186 for { 9187 t := v.Type 9188 c := v.AuxInt 9189 v_0 := v.Args[0] 9190 if v_0.Op != OpARM64SLLconst { 9191 break 9192 } 9193 if v_0.AuxInt != 32-c { 9194 break 9195 } 9196 x := v_0.Args[0] 9197 v_1 := v.Args[1] 9198 if v_1.Op != OpARM64MOVWUreg { 9199 break 9200 } 9201 if x != v_1.Args[0] { 9202 break 9203 } 9204 if !(c < 32 && t.Size() == 4) { 9205 break 9206 } 9207 v.reset(OpARM64RORWconst) 9208 v.AuxInt = c 9209 v.AddArg(x) 9210 return true 9211 } 9212 return false 9213 } 9214 func rewriteValueARM64_OpAdd16(v *Value, config *Config) bool { 9215 b := v.Block 9216 _ = b 9217 // match: (Add16 x y) 9218 // cond: 9219 // result: (ADD x y) 9220 for { 9221 x := v.Args[0] 9222 y := v.Args[1] 9223 v.reset(OpARM64ADD) 9224 v.AddArg(x) 9225 v.AddArg(y) 9226 return true 9227 } 9228 } 9229 func rewriteValueARM64_OpAdd32(v *Value, config *Config) bool { 9230 b := v.Block 9231 _ = b 9232 // match: (Add32 x y) 9233 // cond: 9234 // result: (ADD x y) 9235 for { 9236 x := v.Args[0] 9237 y := v.Args[1] 9238 v.reset(OpARM64ADD) 9239 v.AddArg(x) 9240 v.AddArg(y) 9241 return true 9242 } 9243 } 9244 func rewriteValueARM64_OpAdd32F(v *Value, config *Config) bool { 9245 b := v.Block 9246 _ = b 9247 // match: (Add32F x y) 9248 // cond: 9249 // result: (FADDS x y) 9250 for { 9251 x := v.Args[0] 9252 y := v.Args[1] 9253 v.reset(OpARM64FADDS) 9254 v.AddArg(x) 9255 v.AddArg(y) 9256 return true 9257 } 9258 } 9259 func rewriteValueARM64_OpAdd64(v *Value, config *Config) bool { 9260 b := v.Block 9261 _ = b 9262 // match: (Add64 x y) 9263 // cond: 9264 // result: (ADD x y) 9265 for { 9266 x := v.Args[0] 9267 y := v.Args[1] 9268 v.reset(OpARM64ADD) 9269 v.AddArg(x) 9270 v.AddArg(y) 9271 return true 9272 } 9273 } 9274 func rewriteValueARM64_OpAdd64F(v *Value, config *Config) bool { 9275 b := v.Block 9276 _ = b 9277 // match: (Add64F x y) 9278 // cond: 9279 // result: (FADDD x y) 9280 for { 9281 x := v.Args[0] 9282 y := v.Args[1] 9283 v.reset(OpARM64FADDD) 9284 v.AddArg(x) 9285 v.AddArg(y) 9286 return true 9287 } 9288 } 9289 func rewriteValueARM64_OpAdd8(v *Value, config *Config) bool { 9290 b := v.Block 9291 _ = b 9292 // match: (Add8 x y) 9293 // cond: 9294 // result: (ADD x y) 9295 for { 9296 x := v.Args[0] 9297 y := v.Args[1] 9298 v.reset(OpARM64ADD) 9299 v.AddArg(x) 9300 v.AddArg(y) 9301 return true 9302 } 9303 } 9304 func rewriteValueARM64_OpAddPtr(v *Value, config *Config) bool { 9305 b := v.Block 9306 _ = b 9307 // match: (AddPtr x y) 9308 // cond: 9309 // result: (ADD x y) 9310 for { 9311 x := v.Args[0] 9312 y := v.Args[1] 9313 v.reset(OpARM64ADD) 9314 v.AddArg(x) 9315 v.AddArg(y) 9316 return true 9317 } 9318 } 9319 func rewriteValueARM64_OpAddr(v *Value, config *Config) bool { 9320 b := v.Block 9321 _ = b 9322 // match: (Addr {sym} base) 9323 // cond: 9324 // result: (MOVDaddr {sym} base) 9325 for { 9326 sym := v.Aux 9327 base := v.Args[0] 9328 v.reset(OpARM64MOVDaddr) 9329 v.Aux = sym 9330 v.AddArg(base) 9331 return true 9332 } 9333 } 9334 func rewriteValueARM64_OpAnd16(v *Value, config *Config) bool { 9335 b := v.Block 9336 _ = b 9337 // match: (And16 x y) 9338 // cond: 9339 // result: (AND x y) 9340 for { 9341 x := v.Args[0] 9342 y := v.Args[1] 9343 v.reset(OpARM64AND) 9344 v.AddArg(x) 9345 v.AddArg(y) 9346 return true 9347 } 9348 } 9349 func rewriteValueARM64_OpAnd32(v *Value, config *Config) bool { 9350 b := v.Block 9351 _ = b 9352 // match: (And32 x y) 9353 // cond: 9354 // result: (AND x y) 9355 for { 9356 x := v.Args[0] 9357 y := v.Args[1] 9358 v.reset(OpARM64AND) 9359 v.AddArg(x) 9360 v.AddArg(y) 9361 return true 9362 } 9363 } 9364 func rewriteValueARM64_OpAnd64(v *Value, config *Config) bool { 9365 b := v.Block 9366 _ = b 9367 // match: (And64 x y) 9368 // cond: 9369 // result: (AND x y) 9370 for { 9371 x := v.Args[0] 9372 y := v.Args[1] 9373 v.reset(OpARM64AND) 9374 v.AddArg(x) 9375 v.AddArg(y) 9376 return true 9377 } 9378 } 9379 func rewriteValueARM64_OpAnd8(v *Value, config *Config) bool { 9380 b := v.Block 9381 _ = b 9382 // match: (And8 x y) 9383 // cond: 9384 // result: (AND x y) 9385 for { 9386 x := v.Args[0] 9387 y := v.Args[1] 9388 v.reset(OpARM64AND) 9389 v.AddArg(x) 9390 v.AddArg(y) 9391 return true 9392 } 9393 } 9394 func rewriteValueARM64_OpAndB(v *Value, config *Config) bool { 9395 b := v.Block 9396 _ = b 9397 // match: (AndB x y) 9398 // cond: 9399 // result: (AND x y) 9400 for { 9401 x := v.Args[0] 9402 y := v.Args[1] 9403 v.reset(OpARM64AND) 9404 v.AddArg(x) 9405 v.AddArg(y) 9406 return true 9407 } 9408 } 9409 func rewriteValueARM64_OpAtomicAdd32(v *Value, config *Config) bool { 9410 b := v.Block 9411 _ = b 9412 // match: (AtomicAdd32 ptr val mem) 9413 // cond: 9414 // result: (LoweredAtomicAdd32 ptr val mem) 9415 for { 9416 ptr := v.Args[0] 9417 val := v.Args[1] 9418 mem := v.Args[2] 9419 v.reset(OpARM64LoweredAtomicAdd32) 9420 v.AddArg(ptr) 9421 v.AddArg(val) 9422 v.AddArg(mem) 9423 return true 9424 } 9425 } 9426 func rewriteValueARM64_OpAtomicAdd64(v *Value, config *Config) bool { 9427 b := v.Block 9428 _ = b 9429 // match: (AtomicAdd64 ptr val mem) 9430 // cond: 9431 // result: (LoweredAtomicAdd64 ptr val mem) 9432 for { 9433 ptr := v.Args[0] 9434 val := v.Args[1] 9435 mem := v.Args[2] 9436 v.reset(OpARM64LoweredAtomicAdd64) 9437 v.AddArg(ptr) 9438 v.AddArg(val) 9439 v.AddArg(mem) 9440 return true 9441 } 9442 } 9443 func rewriteValueARM64_OpAtomicAnd8(v *Value, config *Config) bool { 9444 b := v.Block 9445 _ = b 9446 // match: (AtomicAnd8 ptr val mem) 9447 // cond: 9448 // result: (LoweredAtomicAnd8 ptr val mem) 9449 for { 9450 ptr := v.Args[0] 9451 val := v.Args[1] 9452 mem := v.Args[2] 9453 v.reset(OpARM64LoweredAtomicAnd8) 9454 v.AddArg(ptr) 9455 v.AddArg(val) 9456 v.AddArg(mem) 9457 return true 9458 } 9459 } 9460 func rewriteValueARM64_OpAtomicCompareAndSwap32(v *Value, config *Config) bool { 9461 b := v.Block 9462 _ = b 9463 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 9464 // cond: 9465 // result: (LoweredAtomicCas32 ptr old new_ mem) 9466 for { 9467 ptr := v.Args[0] 9468 old := v.Args[1] 9469 new_ := v.Args[2] 9470 mem := v.Args[3] 9471 v.reset(OpARM64LoweredAtomicCas32) 9472 v.AddArg(ptr) 9473 v.AddArg(old) 9474 v.AddArg(new_) 9475 v.AddArg(mem) 9476 return true 9477 } 9478 } 9479 func rewriteValueARM64_OpAtomicCompareAndSwap64(v *Value, config *Config) bool { 9480 b := v.Block 9481 _ = b 9482 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 9483 // cond: 9484 // result: (LoweredAtomicCas64 ptr old new_ mem) 9485 for { 9486 ptr := v.Args[0] 9487 old := v.Args[1] 9488 new_ := v.Args[2] 9489 mem := v.Args[3] 9490 v.reset(OpARM64LoweredAtomicCas64) 9491 v.AddArg(ptr) 9492 v.AddArg(old) 9493 v.AddArg(new_) 9494 v.AddArg(mem) 9495 return true 9496 } 9497 } 9498 func rewriteValueARM64_OpAtomicExchange32(v *Value, config *Config) bool { 9499 b := v.Block 9500 _ = b 9501 // match: (AtomicExchange32 ptr val mem) 9502 // cond: 9503 // result: (LoweredAtomicExchange32 ptr val mem) 9504 for { 9505 ptr := v.Args[0] 9506 val := v.Args[1] 9507 mem := v.Args[2] 9508 v.reset(OpARM64LoweredAtomicExchange32) 9509 v.AddArg(ptr) 9510 v.AddArg(val) 9511 v.AddArg(mem) 9512 return true 9513 } 9514 } 9515 func rewriteValueARM64_OpAtomicExchange64(v *Value, config *Config) bool { 9516 b := v.Block 9517 _ = b 9518 // match: (AtomicExchange64 ptr val mem) 9519 // cond: 9520 // result: (LoweredAtomicExchange64 ptr val mem) 9521 for { 9522 ptr := v.Args[0] 9523 val := v.Args[1] 9524 mem := v.Args[2] 9525 v.reset(OpARM64LoweredAtomicExchange64) 9526 v.AddArg(ptr) 9527 v.AddArg(val) 9528 v.AddArg(mem) 9529 return true 9530 } 9531 } 9532 func rewriteValueARM64_OpAtomicLoad32(v *Value, config *Config) bool { 9533 b := v.Block 9534 _ = b 9535 // match: (AtomicLoad32 ptr mem) 9536 // cond: 9537 // result: (LDARW ptr mem) 9538 for { 9539 ptr := v.Args[0] 9540 mem := v.Args[1] 9541 v.reset(OpARM64LDARW) 9542 v.AddArg(ptr) 9543 v.AddArg(mem) 9544 return true 9545 } 9546 } 9547 func rewriteValueARM64_OpAtomicLoad64(v *Value, config *Config) bool { 9548 b := v.Block 9549 _ = b 9550 // match: (AtomicLoad64 ptr mem) 9551 // cond: 9552 // result: (LDAR ptr mem) 9553 for { 9554 ptr := v.Args[0] 9555 mem := v.Args[1] 9556 v.reset(OpARM64LDAR) 9557 v.AddArg(ptr) 9558 v.AddArg(mem) 9559 return true 9560 } 9561 } 9562 func rewriteValueARM64_OpAtomicLoadPtr(v *Value, config *Config) bool { 9563 b := v.Block 9564 _ = b 9565 // match: (AtomicLoadPtr ptr mem) 9566 // cond: 9567 // result: (LDAR ptr mem) 9568 for { 9569 ptr := v.Args[0] 9570 mem := v.Args[1] 9571 v.reset(OpARM64LDAR) 9572 v.AddArg(ptr) 9573 v.AddArg(mem) 9574 return true 9575 } 9576 } 9577 func rewriteValueARM64_OpAtomicOr8(v *Value, config *Config) bool { 9578 b := v.Block 9579 _ = b 9580 // match: (AtomicOr8 ptr val mem) 9581 // cond: 9582 // result: (LoweredAtomicOr8 ptr val mem) 9583 for { 9584 ptr := v.Args[0] 9585 val := v.Args[1] 9586 mem := v.Args[2] 9587 v.reset(OpARM64LoweredAtomicOr8) 9588 v.AddArg(ptr) 9589 v.AddArg(val) 9590 v.AddArg(mem) 9591 return true 9592 } 9593 } 9594 func rewriteValueARM64_OpAtomicStore32(v *Value, config *Config) bool { 9595 b := v.Block 9596 _ = b 9597 // match: (AtomicStore32 ptr val mem) 9598 // cond: 9599 // result: (STLRW ptr val mem) 9600 for { 9601 ptr := v.Args[0] 9602 val := v.Args[1] 9603 mem := v.Args[2] 9604 v.reset(OpARM64STLRW) 9605 v.AddArg(ptr) 9606 v.AddArg(val) 9607 v.AddArg(mem) 9608 return true 9609 } 9610 } 9611 func rewriteValueARM64_OpAtomicStore64(v *Value, config *Config) bool { 9612 b := v.Block 9613 _ = b 9614 // match: (AtomicStore64 ptr val mem) 9615 // cond: 9616 // result: (STLR ptr val mem) 9617 for { 9618 ptr := v.Args[0] 9619 val := v.Args[1] 9620 mem := v.Args[2] 9621 v.reset(OpARM64STLR) 9622 v.AddArg(ptr) 9623 v.AddArg(val) 9624 v.AddArg(mem) 9625 return true 9626 } 9627 } 9628 func rewriteValueARM64_OpAtomicStorePtrNoWB(v *Value, config *Config) bool { 9629 b := v.Block 9630 _ = b 9631 // match: (AtomicStorePtrNoWB ptr val mem) 9632 // cond: 9633 // result: (STLR ptr val mem) 9634 for { 9635 ptr := v.Args[0] 9636 val := v.Args[1] 9637 mem := v.Args[2] 9638 v.reset(OpARM64STLR) 9639 v.AddArg(ptr) 9640 v.AddArg(val) 9641 v.AddArg(mem) 9642 return true 9643 } 9644 } 9645 func rewriteValueARM64_OpAvg64u(v *Value, config *Config) bool { 9646 b := v.Block 9647 _ = b 9648 // match: (Avg64u <t> x y) 9649 // cond: 9650 // result: (ADD (ADD <t> (SRLconst <t> x [1]) (SRLconst <t> y [1])) (AND <t> (AND <t> x y) (MOVDconst [1]))) 9651 for { 9652 t := v.Type 9653 x := v.Args[0] 9654 y := v.Args[1] 9655 v.reset(OpARM64ADD) 9656 v0 := b.NewValue0(v.Pos, OpARM64ADD, t) 9657 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 9658 v1.AuxInt = 1 9659 v1.AddArg(x) 9660 v0.AddArg(v1) 9661 v2 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 9662 v2.AuxInt = 1 9663 v2.AddArg(y) 9664 v0.AddArg(v2) 9665 v.AddArg(v0) 9666 v3 := b.NewValue0(v.Pos, OpARM64AND, t) 9667 v4 := b.NewValue0(v.Pos, OpARM64AND, t) 9668 v4.AddArg(x) 9669 v4.AddArg(y) 9670 v3.AddArg(v4) 9671 v5 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 9672 v5.AuxInt = 1 9673 v3.AddArg(v5) 9674 v.AddArg(v3) 9675 return true 9676 } 9677 } 9678 func rewriteValueARM64_OpBswap32(v *Value, config *Config) bool { 9679 b := v.Block 9680 _ = b 9681 // match: (Bswap32 x) 9682 // cond: 9683 // result: (REVW x) 9684 for { 9685 x := v.Args[0] 9686 v.reset(OpARM64REVW) 9687 v.AddArg(x) 9688 return true 9689 } 9690 } 9691 func rewriteValueARM64_OpBswap64(v *Value, config *Config) bool { 9692 b := v.Block 9693 _ = b 9694 // match: (Bswap64 x) 9695 // cond: 9696 // result: (REV x) 9697 for { 9698 x := v.Args[0] 9699 v.reset(OpARM64REV) 9700 v.AddArg(x) 9701 return true 9702 } 9703 } 9704 func rewriteValueARM64_OpClosureCall(v *Value, config *Config) bool { 9705 b := v.Block 9706 _ = b 9707 // match: (ClosureCall [argwid] entry closure mem) 9708 // cond: 9709 // result: (CALLclosure [argwid] entry closure mem) 9710 for { 9711 argwid := v.AuxInt 9712 entry := v.Args[0] 9713 closure := v.Args[1] 9714 mem := v.Args[2] 9715 v.reset(OpARM64CALLclosure) 9716 v.AuxInt = argwid 9717 v.AddArg(entry) 9718 v.AddArg(closure) 9719 v.AddArg(mem) 9720 return true 9721 } 9722 } 9723 func rewriteValueARM64_OpCom16(v *Value, config *Config) bool { 9724 b := v.Block 9725 _ = b 9726 // match: (Com16 x) 9727 // cond: 9728 // result: (MVN x) 9729 for { 9730 x := v.Args[0] 9731 v.reset(OpARM64MVN) 9732 v.AddArg(x) 9733 return true 9734 } 9735 } 9736 func rewriteValueARM64_OpCom32(v *Value, config *Config) bool { 9737 b := v.Block 9738 _ = b 9739 // match: (Com32 x) 9740 // cond: 9741 // result: (MVN x) 9742 for { 9743 x := v.Args[0] 9744 v.reset(OpARM64MVN) 9745 v.AddArg(x) 9746 return true 9747 } 9748 } 9749 func rewriteValueARM64_OpCom64(v *Value, config *Config) bool { 9750 b := v.Block 9751 _ = b 9752 // match: (Com64 x) 9753 // cond: 9754 // result: (MVN x) 9755 for { 9756 x := v.Args[0] 9757 v.reset(OpARM64MVN) 9758 v.AddArg(x) 9759 return true 9760 } 9761 } 9762 func rewriteValueARM64_OpCom8(v *Value, config *Config) bool { 9763 b := v.Block 9764 _ = b 9765 // match: (Com8 x) 9766 // cond: 9767 // result: (MVN x) 9768 for { 9769 x := v.Args[0] 9770 v.reset(OpARM64MVN) 9771 v.AddArg(x) 9772 return true 9773 } 9774 } 9775 func rewriteValueARM64_OpConst16(v *Value, config *Config) bool { 9776 b := v.Block 9777 _ = b 9778 // match: (Const16 [val]) 9779 // cond: 9780 // result: (MOVDconst [val]) 9781 for { 9782 val := v.AuxInt 9783 v.reset(OpARM64MOVDconst) 9784 v.AuxInt = val 9785 return true 9786 } 9787 } 9788 func rewriteValueARM64_OpConst32(v *Value, config *Config) bool { 9789 b := v.Block 9790 _ = b 9791 // match: (Const32 [val]) 9792 // cond: 9793 // result: (MOVDconst [val]) 9794 for { 9795 val := v.AuxInt 9796 v.reset(OpARM64MOVDconst) 9797 v.AuxInt = val 9798 return true 9799 } 9800 } 9801 func rewriteValueARM64_OpConst32F(v *Value, config *Config) bool { 9802 b := v.Block 9803 _ = b 9804 // match: (Const32F [val]) 9805 // cond: 9806 // result: (FMOVSconst [val]) 9807 for { 9808 val := v.AuxInt 9809 v.reset(OpARM64FMOVSconst) 9810 v.AuxInt = val 9811 return true 9812 } 9813 } 9814 func rewriteValueARM64_OpConst64(v *Value, config *Config) bool { 9815 b := v.Block 9816 _ = b 9817 // match: (Const64 [val]) 9818 // cond: 9819 // result: (MOVDconst [val]) 9820 for { 9821 val := v.AuxInt 9822 v.reset(OpARM64MOVDconst) 9823 v.AuxInt = val 9824 return true 9825 } 9826 } 9827 func rewriteValueARM64_OpConst64F(v *Value, config *Config) bool { 9828 b := v.Block 9829 _ = b 9830 // match: (Const64F [val]) 9831 // cond: 9832 // result: (FMOVDconst [val]) 9833 for { 9834 val := v.AuxInt 9835 v.reset(OpARM64FMOVDconst) 9836 v.AuxInt = val 9837 return true 9838 } 9839 } 9840 func rewriteValueARM64_OpConst8(v *Value, config *Config) bool { 9841 b := v.Block 9842 _ = b 9843 // match: (Const8 [val]) 9844 // cond: 9845 // result: (MOVDconst [val]) 9846 for { 9847 val := v.AuxInt 9848 v.reset(OpARM64MOVDconst) 9849 v.AuxInt = val 9850 return true 9851 } 9852 } 9853 func rewriteValueARM64_OpConstBool(v *Value, config *Config) bool { 9854 b := v.Block 9855 _ = b 9856 // match: (ConstBool [b]) 9857 // cond: 9858 // result: (MOVDconst [b]) 9859 for { 9860 b := v.AuxInt 9861 v.reset(OpARM64MOVDconst) 9862 v.AuxInt = b 9863 return true 9864 } 9865 } 9866 func rewriteValueARM64_OpConstNil(v *Value, config *Config) bool { 9867 b := v.Block 9868 _ = b 9869 // match: (ConstNil) 9870 // cond: 9871 // result: (MOVDconst [0]) 9872 for { 9873 v.reset(OpARM64MOVDconst) 9874 v.AuxInt = 0 9875 return true 9876 } 9877 } 9878 func rewriteValueARM64_OpConvert(v *Value, config *Config) bool { 9879 b := v.Block 9880 _ = b 9881 // match: (Convert x mem) 9882 // cond: 9883 // result: (MOVDconvert x mem) 9884 for { 9885 x := v.Args[0] 9886 mem := v.Args[1] 9887 v.reset(OpARM64MOVDconvert) 9888 v.AddArg(x) 9889 v.AddArg(mem) 9890 return true 9891 } 9892 } 9893 func rewriteValueARM64_OpCtz32(v *Value, config *Config) bool { 9894 b := v.Block 9895 _ = b 9896 // match: (Ctz32 <t> x) 9897 // cond: 9898 // result: (CLZW (RBITW <t> x)) 9899 for { 9900 t := v.Type 9901 x := v.Args[0] 9902 v.reset(OpARM64CLZW) 9903 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 9904 v0.AddArg(x) 9905 v.AddArg(v0) 9906 return true 9907 } 9908 } 9909 func rewriteValueARM64_OpCtz64(v *Value, config *Config) bool { 9910 b := v.Block 9911 _ = b 9912 // match: (Ctz64 <t> x) 9913 // cond: 9914 // result: (CLZ (RBIT <t> x)) 9915 for { 9916 t := v.Type 9917 x := v.Args[0] 9918 v.reset(OpARM64CLZ) 9919 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 9920 v0.AddArg(x) 9921 v.AddArg(v0) 9922 return true 9923 } 9924 } 9925 func rewriteValueARM64_OpCvt32Fto32(v *Value, config *Config) bool { 9926 b := v.Block 9927 _ = b 9928 // match: (Cvt32Fto32 x) 9929 // cond: 9930 // result: (FCVTZSSW x) 9931 for { 9932 x := v.Args[0] 9933 v.reset(OpARM64FCVTZSSW) 9934 v.AddArg(x) 9935 return true 9936 } 9937 } 9938 func rewriteValueARM64_OpCvt32Fto32U(v *Value, config *Config) bool { 9939 b := v.Block 9940 _ = b 9941 // match: (Cvt32Fto32U x) 9942 // cond: 9943 // result: (FCVTZUSW x) 9944 for { 9945 x := v.Args[0] 9946 v.reset(OpARM64FCVTZUSW) 9947 v.AddArg(x) 9948 return true 9949 } 9950 } 9951 func rewriteValueARM64_OpCvt32Fto64(v *Value, config *Config) bool { 9952 b := v.Block 9953 _ = b 9954 // match: (Cvt32Fto64 x) 9955 // cond: 9956 // result: (FCVTZSS x) 9957 for { 9958 x := v.Args[0] 9959 v.reset(OpARM64FCVTZSS) 9960 v.AddArg(x) 9961 return true 9962 } 9963 } 9964 func rewriteValueARM64_OpCvt32Fto64F(v *Value, config *Config) bool { 9965 b := v.Block 9966 _ = b 9967 // match: (Cvt32Fto64F x) 9968 // cond: 9969 // result: (FCVTSD x) 9970 for { 9971 x := v.Args[0] 9972 v.reset(OpARM64FCVTSD) 9973 v.AddArg(x) 9974 return true 9975 } 9976 } 9977 func rewriteValueARM64_OpCvt32Fto64U(v *Value, config *Config) bool { 9978 b := v.Block 9979 _ = b 9980 // match: (Cvt32Fto64U x) 9981 // cond: 9982 // result: (FCVTZUS x) 9983 for { 9984 x := v.Args[0] 9985 v.reset(OpARM64FCVTZUS) 9986 v.AddArg(x) 9987 return true 9988 } 9989 } 9990 func rewriteValueARM64_OpCvt32Uto32F(v *Value, config *Config) bool { 9991 b := v.Block 9992 _ = b 9993 // match: (Cvt32Uto32F x) 9994 // cond: 9995 // result: (UCVTFWS x) 9996 for { 9997 x := v.Args[0] 9998 v.reset(OpARM64UCVTFWS) 9999 v.AddArg(x) 10000 return true 10001 } 10002 } 10003 func rewriteValueARM64_OpCvt32Uto64F(v *Value, config *Config) bool { 10004 b := v.Block 10005 _ = b 10006 // match: (Cvt32Uto64F x) 10007 // cond: 10008 // result: (UCVTFWD x) 10009 for { 10010 x := v.Args[0] 10011 v.reset(OpARM64UCVTFWD) 10012 v.AddArg(x) 10013 return true 10014 } 10015 } 10016 func rewriteValueARM64_OpCvt32to32F(v *Value, config *Config) bool { 10017 b := v.Block 10018 _ = b 10019 // match: (Cvt32to32F x) 10020 // cond: 10021 // result: (SCVTFWS x) 10022 for { 10023 x := v.Args[0] 10024 v.reset(OpARM64SCVTFWS) 10025 v.AddArg(x) 10026 return true 10027 } 10028 } 10029 func rewriteValueARM64_OpCvt32to64F(v *Value, config *Config) bool { 10030 b := v.Block 10031 _ = b 10032 // match: (Cvt32to64F x) 10033 // cond: 10034 // result: (SCVTFWD x) 10035 for { 10036 x := v.Args[0] 10037 v.reset(OpARM64SCVTFWD) 10038 v.AddArg(x) 10039 return true 10040 } 10041 } 10042 func rewriteValueARM64_OpCvt64Fto32(v *Value, config *Config) bool { 10043 b := v.Block 10044 _ = b 10045 // match: (Cvt64Fto32 x) 10046 // cond: 10047 // result: (FCVTZSDW x) 10048 for { 10049 x := v.Args[0] 10050 v.reset(OpARM64FCVTZSDW) 10051 v.AddArg(x) 10052 return true 10053 } 10054 } 10055 func rewriteValueARM64_OpCvt64Fto32F(v *Value, config *Config) bool { 10056 b := v.Block 10057 _ = b 10058 // match: (Cvt64Fto32F x) 10059 // cond: 10060 // result: (FCVTDS x) 10061 for { 10062 x := v.Args[0] 10063 v.reset(OpARM64FCVTDS) 10064 v.AddArg(x) 10065 return true 10066 } 10067 } 10068 func rewriteValueARM64_OpCvt64Fto32U(v *Value, config *Config) bool { 10069 b := v.Block 10070 _ = b 10071 // match: (Cvt64Fto32U x) 10072 // cond: 10073 // result: (FCVTZUDW x) 10074 for { 10075 x := v.Args[0] 10076 v.reset(OpARM64FCVTZUDW) 10077 v.AddArg(x) 10078 return true 10079 } 10080 } 10081 func rewriteValueARM64_OpCvt64Fto64(v *Value, config *Config) bool { 10082 b := v.Block 10083 _ = b 10084 // match: (Cvt64Fto64 x) 10085 // cond: 10086 // result: (FCVTZSD x) 10087 for { 10088 x := v.Args[0] 10089 v.reset(OpARM64FCVTZSD) 10090 v.AddArg(x) 10091 return true 10092 } 10093 } 10094 func rewriteValueARM64_OpCvt64Fto64U(v *Value, config *Config) bool { 10095 b := v.Block 10096 _ = b 10097 // match: (Cvt64Fto64U x) 10098 // cond: 10099 // result: (FCVTZUD x) 10100 for { 10101 x := v.Args[0] 10102 v.reset(OpARM64FCVTZUD) 10103 v.AddArg(x) 10104 return true 10105 } 10106 } 10107 func rewriteValueARM64_OpCvt64Uto32F(v *Value, config *Config) bool { 10108 b := v.Block 10109 _ = b 10110 // match: (Cvt64Uto32F x) 10111 // cond: 10112 // result: (UCVTFS x) 10113 for { 10114 x := v.Args[0] 10115 v.reset(OpARM64UCVTFS) 10116 v.AddArg(x) 10117 return true 10118 } 10119 } 10120 func rewriteValueARM64_OpCvt64Uto64F(v *Value, config *Config) bool { 10121 b := v.Block 10122 _ = b 10123 // match: (Cvt64Uto64F x) 10124 // cond: 10125 // result: (UCVTFD x) 10126 for { 10127 x := v.Args[0] 10128 v.reset(OpARM64UCVTFD) 10129 v.AddArg(x) 10130 return true 10131 } 10132 } 10133 func rewriteValueARM64_OpCvt64to32F(v *Value, config *Config) bool { 10134 b := v.Block 10135 _ = b 10136 // match: (Cvt64to32F x) 10137 // cond: 10138 // result: (SCVTFS x) 10139 for { 10140 x := v.Args[0] 10141 v.reset(OpARM64SCVTFS) 10142 v.AddArg(x) 10143 return true 10144 } 10145 } 10146 func rewriteValueARM64_OpCvt64to64F(v *Value, config *Config) bool { 10147 b := v.Block 10148 _ = b 10149 // match: (Cvt64to64F x) 10150 // cond: 10151 // result: (SCVTFD x) 10152 for { 10153 x := v.Args[0] 10154 v.reset(OpARM64SCVTFD) 10155 v.AddArg(x) 10156 return true 10157 } 10158 } 10159 func rewriteValueARM64_OpDeferCall(v *Value, config *Config) bool { 10160 b := v.Block 10161 _ = b 10162 // match: (DeferCall [argwid] mem) 10163 // cond: 10164 // result: (CALLdefer [argwid] mem) 10165 for { 10166 argwid := v.AuxInt 10167 mem := v.Args[0] 10168 v.reset(OpARM64CALLdefer) 10169 v.AuxInt = argwid 10170 v.AddArg(mem) 10171 return true 10172 } 10173 } 10174 func rewriteValueARM64_OpDiv16(v *Value, config *Config) bool { 10175 b := v.Block 10176 _ = b 10177 // match: (Div16 x y) 10178 // cond: 10179 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 10180 for { 10181 x := v.Args[0] 10182 y := v.Args[1] 10183 v.reset(OpARM64DIVW) 10184 v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10185 v0.AddArg(x) 10186 v.AddArg(v0) 10187 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10188 v1.AddArg(y) 10189 v.AddArg(v1) 10190 return true 10191 } 10192 } 10193 func rewriteValueARM64_OpDiv16u(v *Value, config *Config) bool { 10194 b := v.Block 10195 _ = b 10196 // match: (Div16u x y) 10197 // cond: 10198 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 10199 for { 10200 x := v.Args[0] 10201 y := v.Args[1] 10202 v.reset(OpARM64UDIVW) 10203 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10204 v0.AddArg(x) 10205 v.AddArg(v0) 10206 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10207 v1.AddArg(y) 10208 v.AddArg(v1) 10209 return true 10210 } 10211 } 10212 func rewriteValueARM64_OpDiv32(v *Value, config *Config) bool { 10213 b := v.Block 10214 _ = b 10215 // match: (Div32 x y) 10216 // cond: 10217 // result: (DIVW x y) 10218 for { 10219 x := v.Args[0] 10220 y := v.Args[1] 10221 v.reset(OpARM64DIVW) 10222 v.AddArg(x) 10223 v.AddArg(y) 10224 return true 10225 } 10226 } 10227 func rewriteValueARM64_OpDiv32F(v *Value, config *Config) bool { 10228 b := v.Block 10229 _ = b 10230 // match: (Div32F x y) 10231 // cond: 10232 // result: (FDIVS x y) 10233 for { 10234 x := v.Args[0] 10235 y := v.Args[1] 10236 v.reset(OpARM64FDIVS) 10237 v.AddArg(x) 10238 v.AddArg(y) 10239 return true 10240 } 10241 } 10242 func rewriteValueARM64_OpDiv32u(v *Value, config *Config) bool { 10243 b := v.Block 10244 _ = b 10245 // match: (Div32u x y) 10246 // cond: 10247 // result: (UDIVW x y) 10248 for { 10249 x := v.Args[0] 10250 y := v.Args[1] 10251 v.reset(OpARM64UDIVW) 10252 v.AddArg(x) 10253 v.AddArg(y) 10254 return true 10255 } 10256 } 10257 func rewriteValueARM64_OpDiv64(v *Value, config *Config) bool { 10258 b := v.Block 10259 _ = b 10260 // match: (Div64 x y) 10261 // cond: 10262 // result: (DIV x y) 10263 for { 10264 x := v.Args[0] 10265 y := v.Args[1] 10266 v.reset(OpARM64DIV) 10267 v.AddArg(x) 10268 v.AddArg(y) 10269 return true 10270 } 10271 } 10272 func rewriteValueARM64_OpDiv64F(v *Value, config *Config) bool { 10273 b := v.Block 10274 _ = b 10275 // match: (Div64F x y) 10276 // cond: 10277 // result: (FDIVD x y) 10278 for { 10279 x := v.Args[0] 10280 y := v.Args[1] 10281 v.reset(OpARM64FDIVD) 10282 v.AddArg(x) 10283 v.AddArg(y) 10284 return true 10285 } 10286 } 10287 func rewriteValueARM64_OpDiv64u(v *Value, config *Config) bool { 10288 b := v.Block 10289 _ = b 10290 // match: (Div64u x y) 10291 // cond: 10292 // result: (UDIV x y) 10293 for { 10294 x := v.Args[0] 10295 y := v.Args[1] 10296 v.reset(OpARM64UDIV) 10297 v.AddArg(x) 10298 v.AddArg(y) 10299 return true 10300 } 10301 } 10302 func rewriteValueARM64_OpDiv8(v *Value, config *Config) bool { 10303 b := v.Block 10304 _ = b 10305 // match: (Div8 x y) 10306 // cond: 10307 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 10308 for { 10309 x := v.Args[0] 10310 y := v.Args[1] 10311 v.reset(OpARM64DIVW) 10312 v0 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10313 v0.AddArg(x) 10314 v.AddArg(v0) 10315 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10316 v1.AddArg(y) 10317 v.AddArg(v1) 10318 return true 10319 } 10320 } 10321 func rewriteValueARM64_OpDiv8u(v *Value, config *Config) bool { 10322 b := v.Block 10323 _ = b 10324 // match: (Div8u x y) 10325 // cond: 10326 // result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y)) 10327 for { 10328 x := v.Args[0] 10329 y := v.Args[1] 10330 v.reset(OpARM64UDIVW) 10331 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10332 v0.AddArg(x) 10333 v.AddArg(v0) 10334 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10335 v1.AddArg(y) 10336 v.AddArg(v1) 10337 return true 10338 } 10339 } 10340 func rewriteValueARM64_OpEq16(v *Value, config *Config) bool { 10341 b := v.Block 10342 _ = b 10343 // match: (Eq16 x y) 10344 // cond: 10345 // result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 10346 for { 10347 x := v.Args[0] 10348 y := v.Args[1] 10349 v.reset(OpARM64Equal) 10350 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10351 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10352 v1.AddArg(x) 10353 v0.AddArg(v1) 10354 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10355 v2.AddArg(y) 10356 v0.AddArg(v2) 10357 v.AddArg(v0) 10358 return true 10359 } 10360 } 10361 func rewriteValueARM64_OpEq32(v *Value, config *Config) bool { 10362 b := v.Block 10363 _ = b 10364 // match: (Eq32 x y) 10365 // cond: 10366 // result: (Equal (CMPW x y)) 10367 for { 10368 x := v.Args[0] 10369 y := v.Args[1] 10370 v.reset(OpARM64Equal) 10371 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10372 v0.AddArg(x) 10373 v0.AddArg(y) 10374 v.AddArg(v0) 10375 return true 10376 } 10377 } 10378 func rewriteValueARM64_OpEq32F(v *Value, config *Config) bool { 10379 b := v.Block 10380 _ = b 10381 // match: (Eq32F x y) 10382 // cond: 10383 // result: (Equal (FCMPS x y)) 10384 for { 10385 x := v.Args[0] 10386 y := v.Args[1] 10387 v.reset(OpARM64Equal) 10388 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 10389 v0.AddArg(x) 10390 v0.AddArg(y) 10391 v.AddArg(v0) 10392 return true 10393 } 10394 } 10395 func rewriteValueARM64_OpEq64(v *Value, config *Config) bool { 10396 b := v.Block 10397 _ = b 10398 // match: (Eq64 x y) 10399 // cond: 10400 // result: (Equal (CMP x y)) 10401 for { 10402 x := v.Args[0] 10403 y := v.Args[1] 10404 v.reset(OpARM64Equal) 10405 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10406 v0.AddArg(x) 10407 v0.AddArg(y) 10408 v.AddArg(v0) 10409 return true 10410 } 10411 } 10412 func rewriteValueARM64_OpEq64F(v *Value, config *Config) bool { 10413 b := v.Block 10414 _ = b 10415 // match: (Eq64F x y) 10416 // cond: 10417 // result: (Equal (FCMPD x y)) 10418 for { 10419 x := v.Args[0] 10420 y := v.Args[1] 10421 v.reset(OpARM64Equal) 10422 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 10423 v0.AddArg(x) 10424 v0.AddArg(y) 10425 v.AddArg(v0) 10426 return true 10427 } 10428 } 10429 func rewriteValueARM64_OpEq8(v *Value, config *Config) bool { 10430 b := v.Block 10431 _ = b 10432 // match: (Eq8 x y) 10433 // cond: 10434 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 10435 for { 10436 x := v.Args[0] 10437 y := v.Args[1] 10438 v.reset(OpARM64Equal) 10439 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10440 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10441 v1.AddArg(x) 10442 v0.AddArg(v1) 10443 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10444 v2.AddArg(y) 10445 v0.AddArg(v2) 10446 v.AddArg(v0) 10447 return true 10448 } 10449 } 10450 func rewriteValueARM64_OpEqB(v *Value, config *Config) bool { 10451 b := v.Block 10452 _ = b 10453 // match: (EqB x y) 10454 // cond: 10455 // result: (XOR (MOVDconst [1]) (XOR <config.fe.TypeBool()> x y)) 10456 for { 10457 x := v.Args[0] 10458 y := v.Args[1] 10459 v.reset(OpARM64XOR) 10460 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 10461 v0.AuxInt = 1 10462 v.AddArg(v0) 10463 v1 := b.NewValue0(v.Pos, OpARM64XOR, config.fe.TypeBool()) 10464 v1.AddArg(x) 10465 v1.AddArg(y) 10466 v.AddArg(v1) 10467 return true 10468 } 10469 } 10470 func rewriteValueARM64_OpEqPtr(v *Value, config *Config) bool { 10471 b := v.Block 10472 _ = b 10473 // match: (EqPtr x y) 10474 // cond: 10475 // result: (Equal (CMP x y)) 10476 for { 10477 x := v.Args[0] 10478 y := v.Args[1] 10479 v.reset(OpARM64Equal) 10480 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10481 v0.AddArg(x) 10482 v0.AddArg(y) 10483 v.AddArg(v0) 10484 return true 10485 } 10486 } 10487 func rewriteValueARM64_OpGeq16(v *Value, config *Config) bool { 10488 b := v.Block 10489 _ = b 10490 // match: (Geq16 x y) 10491 // cond: 10492 // result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 10493 for { 10494 x := v.Args[0] 10495 y := v.Args[1] 10496 v.reset(OpARM64GreaterEqual) 10497 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10498 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10499 v1.AddArg(x) 10500 v0.AddArg(v1) 10501 v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10502 v2.AddArg(y) 10503 v0.AddArg(v2) 10504 v.AddArg(v0) 10505 return true 10506 } 10507 } 10508 func rewriteValueARM64_OpGeq16U(v *Value, config *Config) bool { 10509 b := v.Block 10510 _ = b 10511 // match: (Geq16U x y) 10512 // cond: 10513 // result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 10514 for { 10515 x := v.Args[0] 10516 y := v.Args[1] 10517 v.reset(OpARM64GreaterEqualU) 10518 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10519 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10520 v1.AddArg(x) 10521 v0.AddArg(v1) 10522 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10523 v2.AddArg(y) 10524 v0.AddArg(v2) 10525 v.AddArg(v0) 10526 return true 10527 } 10528 } 10529 func rewriteValueARM64_OpGeq32(v *Value, config *Config) bool { 10530 b := v.Block 10531 _ = b 10532 // match: (Geq32 x y) 10533 // cond: 10534 // result: (GreaterEqual (CMPW x y)) 10535 for { 10536 x := v.Args[0] 10537 y := v.Args[1] 10538 v.reset(OpARM64GreaterEqual) 10539 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10540 v0.AddArg(x) 10541 v0.AddArg(y) 10542 v.AddArg(v0) 10543 return true 10544 } 10545 } 10546 func rewriteValueARM64_OpGeq32F(v *Value, config *Config) bool { 10547 b := v.Block 10548 _ = b 10549 // match: (Geq32F x y) 10550 // cond: 10551 // result: (GreaterEqual (FCMPS x y)) 10552 for { 10553 x := v.Args[0] 10554 y := v.Args[1] 10555 v.reset(OpARM64GreaterEqual) 10556 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 10557 v0.AddArg(x) 10558 v0.AddArg(y) 10559 v.AddArg(v0) 10560 return true 10561 } 10562 } 10563 func rewriteValueARM64_OpGeq32U(v *Value, config *Config) bool { 10564 b := v.Block 10565 _ = b 10566 // match: (Geq32U x y) 10567 // cond: 10568 // result: (GreaterEqualU (CMPW x y)) 10569 for { 10570 x := v.Args[0] 10571 y := v.Args[1] 10572 v.reset(OpARM64GreaterEqualU) 10573 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10574 v0.AddArg(x) 10575 v0.AddArg(y) 10576 v.AddArg(v0) 10577 return true 10578 } 10579 } 10580 func rewriteValueARM64_OpGeq64(v *Value, config *Config) bool { 10581 b := v.Block 10582 _ = b 10583 // match: (Geq64 x y) 10584 // cond: 10585 // result: (GreaterEqual (CMP x y)) 10586 for { 10587 x := v.Args[0] 10588 y := v.Args[1] 10589 v.reset(OpARM64GreaterEqual) 10590 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10591 v0.AddArg(x) 10592 v0.AddArg(y) 10593 v.AddArg(v0) 10594 return true 10595 } 10596 } 10597 func rewriteValueARM64_OpGeq64F(v *Value, config *Config) bool { 10598 b := v.Block 10599 _ = b 10600 // match: (Geq64F x y) 10601 // cond: 10602 // result: (GreaterEqual (FCMPD x y)) 10603 for { 10604 x := v.Args[0] 10605 y := v.Args[1] 10606 v.reset(OpARM64GreaterEqual) 10607 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 10608 v0.AddArg(x) 10609 v0.AddArg(y) 10610 v.AddArg(v0) 10611 return true 10612 } 10613 } 10614 func rewriteValueARM64_OpGeq64U(v *Value, config *Config) bool { 10615 b := v.Block 10616 _ = b 10617 // match: (Geq64U x y) 10618 // cond: 10619 // result: (GreaterEqualU (CMP x y)) 10620 for { 10621 x := v.Args[0] 10622 y := v.Args[1] 10623 v.reset(OpARM64GreaterEqualU) 10624 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10625 v0.AddArg(x) 10626 v0.AddArg(y) 10627 v.AddArg(v0) 10628 return true 10629 } 10630 } 10631 func rewriteValueARM64_OpGeq8(v *Value, config *Config) bool { 10632 b := v.Block 10633 _ = b 10634 // match: (Geq8 x y) 10635 // cond: 10636 // result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 10637 for { 10638 x := v.Args[0] 10639 y := v.Args[1] 10640 v.reset(OpARM64GreaterEqual) 10641 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10642 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10643 v1.AddArg(x) 10644 v0.AddArg(v1) 10645 v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10646 v2.AddArg(y) 10647 v0.AddArg(v2) 10648 v.AddArg(v0) 10649 return true 10650 } 10651 } 10652 func rewriteValueARM64_OpGeq8U(v *Value, config *Config) bool { 10653 b := v.Block 10654 _ = b 10655 // match: (Geq8U x y) 10656 // cond: 10657 // result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 10658 for { 10659 x := v.Args[0] 10660 y := v.Args[1] 10661 v.reset(OpARM64GreaterEqualU) 10662 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10663 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10664 v1.AddArg(x) 10665 v0.AddArg(v1) 10666 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10667 v2.AddArg(y) 10668 v0.AddArg(v2) 10669 v.AddArg(v0) 10670 return true 10671 } 10672 } 10673 func rewriteValueARM64_OpGetClosurePtr(v *Value, config *Config) bool { 10674 b := v.Block 10675 _ = b 10676 // match: (GetClosurePtr) 10677 // cond: 10678 // result: (LoweredGetClosurePtr) 10679 for { 10680 v.reset(OpARM64LoweredGetClosurePtr) 10681 return true 10682 } 10683 } 10684 func rewriteValueARM64_OpGoCall(v *Value, config *Config) bool { 10685 b := v.Block 10686 _ = b 10687 // match: (GoCall [argwid] mem) 10688 // cond: 10689 // result: (CALLgo [argwid] mem) 10690 for { 10691 argwid := v.AuxInt 10692 mem := v.Args[0] 10693 v.reset(OpARM64CALLgo) 10694 v.AuxInt = argwid 10695 v.AddArg(mem) 10696 return true 10697 } 10698 } 10699 func rewriteValueARM64_OpGreater16(v *Value, config *Config) bool { 10700 b := v.Block 10701 _ = b 10702 // match: (Greater16 x y) 10703 // cond: 10704 // result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 10705 for { 10706 x := v.Args[0] 10707 y := v.Args[1] 10708 v.reset(OpARM64GreaterThan) 10709 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10710 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10711 v1.AddArg(x) 10712 v0.AddArg(v1) 10713 v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10714 v2.AddArg(y) 10715 v0.AddArg(v2) 10716 v.AddArg(v0) 10717 return true 10718 } 10719 } 10720 func rewriteValueARM64_OpGreater16U(v *Value, config *Config) bool { 10721 b := v.Block 10722 _ = b 10723 // match: (Greater16U x y) 10724 // cond: 10725 // result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 10726 for { 10727 x := v.Args[0] 10728 y := v.Args[1] 10729 v.reset(OpARM64GreaterThanU) 10730 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10731 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10732 v1.AddArg(x) 10733 v0.AddArg(v1) 10734 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10735 v2.AddArg(y) 10736 v0.AddArg(v2) 10737 v.AddArg(v0) 10738 return true 10739 } 10740 } 10741 func rewriteValueARM64_OpGreater32(v *Value, config *Config) bool { 10742 b := v.Block 10743 _ = b 10744 // match: (Greater32 x y) 10745 // cond: 10746 // result: (GreaterThan (CMPW x y)) 10747 for { 10748 x := v.Args[0] 10749 y := v.Args[1] 10750 v.reset(OpARM64GreaterThan) 10751 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10752 v0.AddArg(x) 10753 v0.AddArg(y) 10754 v.AddArg(v0) 10755 return true 10756 } 10757 } 10758 func rewriteValueARM64_OpGreater32F(v *Value, config *Config) bool { 10759 b := v.Block 10760 _ = b 10761 // match: (Greater32F x y) 10762 // cond: 10763 // result: (GreaterThan (FCMPS x y)) 10764 for { 10765 x := v.Args[0] 10766 y := v.Args[1] 10767 v.reset(OpARM64GreaterThan) 10768 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 10769 v0.AddArg(x) 10770 v0.AddArg(y) 10771 v.AddArg(v0) 10772 return true 10773 } 10774 } 10775 func rewriteValueARM64_OpGreater32U(v *Value, config *Config) bool { 10776 b := v.Block 10777 _ = b 10778 // match: (Greater32U x y) 10779 // cond: 10780 // result: (GreaterThanU (CMPW x y)) 10781 for { 10782 x := v.Args[0] 10783 y := v.Args[1] 10784 v.reset(OpARM64GreaterThanU) 10785 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10786 v0.AddArg(x) 10787 v0.AddArg(y) 10788 v.AddArg(v0) 10789 return true 10790 } 10791 } 10792 func rewriteValueARM64_OpGreater64(v *Value, config *Config) bool { 10793 b := v.Block 10794 _ = b 10795 // match: (Greater64 x y) 10796 // cond: 10797 // result: (GreaterThan (CMP x y)) 10798 for { 10799 x := v.Args[0] 10800 y := v.Args[1] 10801 v.reset(OpARM64GreaterThan) 10802 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10803 v0.AddArg(x) 10804 v0.AddArg(y) 10805 v.AddArg(v0) 10806 return true 10807 } 10808 } 10809 func rewriteValueARM64_OpGreater64F(v *Value, config *Config) bool { 10810 b := v.Block 10811 _ = b 10812 // match: (Greater64F x y) 10813 // cond: 10814 // result: (GreaterThan (FCMPD x y)) 10815 for { 10816 x := v.Args[0] 10817 y := v.Args[1] 10818 v.reset(OpARM64GreaterThan) 10819 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 10820 v0.AddArg(x) 10821 v0.AddArg(y) 10822 v.AddArg(v0) 10823 return true 10824 } 10825 } 10826 func rewriteValueARM64_OpGreater64U(v *Value, config *Config) bool { 10827 b := v.Block 10828 _ = b 10829 // match: (Greater64U x y) 10830 // cond: 10831 // result: (GreaterThanU (CMP x y)) 10832 for { 10833 x := v.Args[0] 10834 y := v.Args[1] 10835 v.reset(OpARM64GreaterThanU) 10836 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10837 v0.AddArg(x) 10838 v0.AddArg(y) 10839 v.AddArg(v0) 10840 return true 10841 } 10842 } 10843 func rewriteValueARM64_OpGreater8(v *Value, config *Config) bool { 10844 b := v.Block 10845 _ = b 10846 // match: (Greater8 x y) 10847 // cond: 10848 // result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 10849 for { 10850 x := v.Args[0] 10851 y := v.Args[1] 10852 v.reset(OpARM64GreaterThan) 10853 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10854 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10855 v1.AddArg(x) 10856 v0.AddArg(v1) 10857 v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 10858 v2.AddArg(y) 10859 v0.AddArg(v2) 10860 v.AddArg(v0) 10861 return true 10862 } 10863 } 10864 func rewriteValueARM64_OpGreater8U(v *Value, config *Config) bool { 10865 b := v.Block 10866 _ = b 10867 // match: (Greater8U x y) 10868 // cond: 10869 // result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 10870 for { 10871 x := v.Args[0] 10872 y := v.Args[1] 10873 v.reset(OpARM64GreaterThanU) 10874 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10875 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10876 v1.AddArg(x) 10877 v0.AddArg(v1) 10878 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 10879 v2.AddArg(y) 10880 v0.AddArg(v2) 10881 v.AddArg(v0) 10882 return true 10883 } 10884 } 10885 func rewriteValueARM64_OpHmul16(v *Value, config *Config) bool { 10886 b := v.Block 10887 _ = b 10888 // match: (Hmul16 x y) 10889 // cond: 10890 // result: (SRAconst (MULW <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16]) 10891 for { 10892 x := v.Args[0] 10893 y := v.Args[1] 10894 v.reset(OpARM64SRAconst) 10895 v.AuxInt = 16 10896 v0 := b.NewValue0(v.Pos, OpARM64MULW, config.fe.TypeInt32()) 10897 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10898 v1.AddArg(x) 10899 v0.AddArg(v1) 10900 v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 10901 v2.AddArg(y) 10902 v0.AddArg(v2) 10903 v.AddArg(v0) 10904 return true 10905 } 10906 } 10907 func rewriteValueARM64_OpHmul16u(v *Value, config *Config) bool { 10908 b := v.Block 10909 _ = b 10910 // match: (Hmul16u x y) 10911 // cond: 10912 // result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16]) 10913 for { 10914 x := v.Args[0] 10915 y := v.Args[1] 10916 v.reset(OpARM64SRLconst) 10917 v.AuxInt = 16 10918 v0 := b.NewValue0(v.Pos, OpARM64MUL, config.fe.TypeUInt32()) 10919 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10920 v1.AddArg(x) 10921 v0.AddArg(v1) 10922 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 10923 v2.AddArg(y) 10924 v0.AddArg(v2) 10925 v.AddArg(v0) 10926 return true 10927 } 10928 } 10929 func rewriteValueARM64_OpHmul32(v *Value, config *Config) bool { 10930 b := v.Block 10931 _ = b 10932 // match: (Hmul32 x y) 10933 // cond: 10934 // result: (SRAconst (MULL <config.fe.TypeInt64()> x y) [32]) 10935 for { 10936 x := v.Args[0] 10937 y := v.Args[1] 10938 v.reset(OpARM64SRAconst) 10939 v.AuxInt = 32 10940 v0 := b.NewValue0(v.Pos, OpARM64MULL, config.fe.TypeInt64()) 10941 v0.AddArg(x) 10942 v0.AddArg(y) 10943 v.AddArg(v0) 10944 return true 10945 } 10946 } 10947 func rewriteValueARM64_OpHmul32u(v *Value, config *Config) bool { 10948 b := v.Block 10949 _ = b 10950 // match: (Hmul32u x y) 10951 // cond: 10952 // result: (SRAconst (UMULL <config.fe.TypeUInt64()> x y) [32]) 10953 for { 10954 x := v.Args[0] 10955 y := v.Args[1] 10956 v.reset(OpARM64SRAconst) 10957 v.AuxInt = 32 10958 v0 := b.NewValue0(v.Pos, OpARM64UMULL, config.fe.TypeUInt64()) 10959 v0.AddArg(x) 10960 v0.AddArg(y) 10961 v.AddArg(v0) 10962 return true 10963 } 10964 } 10965 func rewriteValueARM64_OpHmul64(v *Value, config *Config) bool { 10966 b := v.Block 10967 _ = b 10968 // match: (Hmul64 x y) 10969 // cond: 10970 // result: (MULH x y) 10971 for { 10972 x := v.Args[0] 10973 y := v.Args[1] 10974 v.reset(OpARM64MULH) 10975 v.AddArg(x) 10976 v.AddArg(y) 10977 return true 10978 } 10979 } 10980 func rewriteValueARM64_OpHmul64u(v *Value, config *Config) bool { 10981 b := v.Block 10982 _ = b 10983 // match: (Hmul64u x y) 10984 // cond: 10985 // result: (UMULH x y) 10986 for { 10987 x := v.Args[0] 10988 y := v.Args[1] 10989 v.reset(OpARM64UMULH) 10990 v.AddArg(x) 10991 v.AddArg(y) 10992 return true 10993 } 10994 } 10995 func rewriteValueARM64_OpHmul8(v *Value, config *Config) bool { 10996 b := v.Block 10997 _ = b 10998 // match: (Hmul8 x y) 10999 // cond: 11000 // result: (SRAconst (MULW <config.fe.TypeInt16()> (SignExt8to32 x) (SignExt8to32 y)) [8]) 11001 for { 11002 x := v.Args[0] 11003 y := v.Args[1] 11004 v.reset(OpARM64SRAconst) 11005 v.AuxInt = 8 11006 v0 := b.NewValue0(v.Pos, OpARM64MULW, config.fe.TypeInt16()) 11007 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11008 v1.AddArg(x) 11009 v0.AddArg(v1) 11010 v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11011 v2.AddArg(y) 11012 v0.AddArg(v2) 11013 v.AddArg(v0) 11014 return true 11015 } 11016 } 11017 func rewriteValueARM64_OpHmul8u(v *Value, config *Config) bool { 11018 b := v.Block 11019 _ = b 11020 // match: (Hmul8u x y) 11021 // cond: 11022 // result: (SRLconst (MUL <config.fe.TypeUInt16()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8]) 11023 for { 11024 x := v.Args[0] 11025 y := v.Args[1] 11026 v.reset(OpARM64SRLconst) 11027 v.AuxInt = 8 11028 v0 := b.NewValue0(v.Pos, OpARM64MUL, config.fe.TypeUInt16()) 11029 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11030 v1.AddArg(x) 11031 v0.AddArg(v1) 11032 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11033 v2.AddArg(y) 11034 v0.AddArg(v2) 11035 v.AddArg(v0) 11036 return true 11037 } 11038 } 11039 func rewriteValueARM64_OpInterCall(v *Value, config *Config) bool { 11040 b := v.Block 11041 _ = b 11042 // match: (InterCall [argwid] entry mem) 11043 // cond: 11044 // result: (CALLinter [argwid] entry mem) 11045 for { 11046 argwid := v.AuxInt 11047 entry := v.Args[0] 11048 mem := v.Args[1] 11049 v.reset(OpARM64CALLinter) 11050 v.AuxInt = argwid 11051 v.AddArg(entry) 11052 v.AddArg(mem) 11053 return true 11054 } 11055 } 11056 func rewriteValueARM64_OpIsInBounds(v *Value, config *Config) bool { 11057 b := v.Block 11058 _ = b 11059 // match: (IsInBounds idx len) 11060 // cond: 11061 // result: (LessThanU (CMP idx len)) 11062 for { 11063 idx := v.Args[0] 11064 len := v.Args[1] 11065 v.reset(OpARM64LessThanU) 11066 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11067 v0.AddArg(idx) 11068 v0.AddArg(len) 11069 v.AddArg(v0) 11070 return true 11071 } 11072 } 11073 func rewriteValueARM64_OpIsNonNil(v *Value, config *Config) bool { 11074 b := v.Block 11075 _ = b 11076 // match: (IsNonNil ptr) 11077 // cond: 11078 // result: (NotEqual (CMPconst [0] ptr)) 11079 for { 11080 ptr := v.Args[0] 11081 v.reset(OpARM64NotEqual) 11082 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11083 v0.AuxInt = 0 11084 v0.AddArg(ptr) 11085 v.AddArg(v0) 11086 return true 11087 } 11088 } 11089 func rewriteValueARM64_OpIsSliceInBounds(v *Value, config *Config) bool { 11090 b := v.Block 11091 _ = b 11092 // match: (IsSliceInBounds idx len) 11093 // cond: 11094 // result: (LessEqualU (CMP idx len)) 11095 for { 11096 idx := v.Args[0] 11097 len := v.Args[1] 11098 v.reset(OpARM64LessEqualU) 11099 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11100 v0.AddArg(idx) 11101 v0.AddArg(len) 11102 v.AddArg(v0) 11103 return true 11104 } 11105 } 11106 func rewriteValueARM64_OpLeq16(v *Value, config *Config) bool { 11107 b := v.Block 11108 _ = b 11109 // match: (Leq16 x y) 11110 // cond: 11111 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 11112 for { 11113 x := v.Args[0] 11114 y := v.Args[1] 11115 v.reset(OpARM64LessEqual) 11116 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11117 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 11118 v1.AddArg(x) 11119 v0.AddArg(v1) 11120 v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 11121 v2.AddArg(y) 11122 v0.AddArg(v2) 11123 v.AddArg(v0) 11124 return true 11125 } 11126 } 11127 func rewriteValueARM64_OpLeq16U(v *Value, config *Config) bool { 11128 b := v.Block 11129 _ = b 11130 // match: (Leq16U x y) 11131 // cond: 11132 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11133 for { 11134 x := v.Args[0] 11135 y := v.Args[1] 11136 v.reset(OpARM64LessEqualU) 11137 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11138 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 11139 v1.AddArg(x) 11140 v0.AddArg(v1) 11141 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 11142 v2.AddArg(y) 11143 v0.AddArg(v2) 11144 v.AddArg(v0) 11145 return true 11146 } 11147 } 11148 func rewriteValueARM64_OpLeq32(v *Value, config *Config) bool { 11149 b := v.Block 11150 _ = b 11151 // match: (Leq32 x y) 11152 // cond: 11153 // result: (LessEqual (CMPW x y)) 11154 for { 11155 x := v.Args[0] 11156 y := v.Args[1] 11157 v.reset(OpARM64LessEqual) 11158 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11159 v0.AddArg(x) 11160 v0.AddArg(y) 11161 v.AddArg(v0) 11162 return true 11163 } 11164 } 11165 func rewriteValueARM64_OpLeq32F(v *Value, config *Config) bool { 11166 b := v.Block 11167 _ = b 11168 // match: (Leq32F x y) 11169 // cond: 11170 // result: (GreaterEqual (FCMPS y x)) 11171 for { 11172 x := v.Args[0] 11173 y := v.Args[1] 11174 v.reset(OpARM64GreaterEqual) 11175 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 11176 v0.AddArg(y) 11177 v0.AddArg(x) 11178 v.AddArg(v0) 11179 return true 11180 } 11181 } 11182 func rewriteValueARM64_OpLeq32U(v *Value, config *Config) bool { 11183 b := v.Block 11184 _ = b 11185 // match: (Leq32U x y) 11186 // cond: 11187 // result: (LessEqualU (CMPW x y)) 11188 for { 11189 x := v.Args[0] 11190 y := v.Args[1] 11191 v.reset(OpARM64LessEqualU) 11192 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11193 v0.AddArg(x) 11194 v0.AddArg(y) 11195 v.AddArg(v0) 11196 return true 11197 } 11198 } 11199 func rewriteValueARM64_OpLeq64(v *Value, config *Config) bool { 11200 b := v.Block 11201 _ = b 11202 // match: (Leq64 x y) 11203 // cond: 11204 // result: (LessEqual (CMP x y)) 11205 for { 11206 x := v.Args[0] 11207 y := v.Args[1] 11208 v.reset(OpARM64LessEqual) 11209 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11210 v0.AddArg(x) 11211 v0.AddArg(y) 11212 v.AddArg(v0) 11213 return true 11214 } 11215 } 11216 func rewriteValueARM64_OpLeq64F(v *Value, config *Config) bool { 11217 b := v.Block 11218 _ = b 11219 // match: (Leq64F x y) 11220 // cond: 11221 // result: (GreaterEqual (FCMPD y x)) 11222 for { 11223 x := v.Args[0] 11224 y := v.Args[1] 11225 v.reset(OpARM64GreaterEqual) 11226 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 11227 v0.AddArg(y) 11228 v0.AddArg(x) 11229 v.AddArg(v0) 11230 return true 11231 } 11232 } 11233 func rewriteValueARM64_OpLeq64U(v *Value, config *Config) bool { 11234 b := v.Block 11235 _ = b 11236 // match: (Leq64U x y) 11237 // cond: 11238 // result: (LessEqualU (CMP x y)) 11239 for { 11240 x := v.Args[0] 11241 y := v.Args[1] 11242 v.reset(OpARM64LessEqualU) 11243 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11244 v0.AddArg(x) 11245 v0.AddArg(y) 11246 v.AddArg(v0) 11247 return true 11248 } 11249 } 11250 func rewriteValueARM64_OpLeq8(v *Value, config *Config) bool { 11251 b := v.Block 11252 _ = b 11253 // match: (Leq8 x y) 11254 // cond: 11255 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11256 for { 11257 x := v.Args[0] 11258 y := v.Args[1] 11259 v.reset(OpARM64LessEqual) 11260 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11261 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11262 v1.AddArg(x) 11263 v0.AddArg(v1) 11264 v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11265 v2.AddArg(y) 11266 v0.AddArg(v2) 11267 v.AddArg(v0) 11268 return true 11269 } 11270 } 11271 func rewriteValueARM64_OpLeq8U(v *Value, config *Config) bool { 11272 b := v.Block 11273 _ = b 11274 // match: (Leq8U x y) 11275 // cond: 11276 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11277 for { 11278 x := v.Args[0] 11279 y := v.Args[1] 11280 v.reset(OpARM64LessEqualU) 11281 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11282 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11283 v1.AddArg(x) 11284 v0.AddArg(v1) 11285 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11286 v2.AddArg(y) 11287 v0.AddArg(v2) 11288 v.AddArg(v0) 11289 return true 11290 } 11291 } 11292 func rewriteValueARM64_OpLess16(v *Value, config *Config) bool { 11293 b := v.Block 11294 _ = b 11295 // match: (Less16 x y) 11296 // cond: 11297 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 11298 for { 11299 x := v.Args[0] 11300 y := v.Args[1] 11301 v.reset(OpARM64LessThan) 11302 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11303 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 11304 v1.AddArg(x) 11305 v0.AddArg(v1) 11306 v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 11307 v2.AddArg(y) 11308 v0.AddArg(v2) 11309 v.AddArg(v0) 11310 return true 11311 } 11312 } 11313 func rewriteValueARM64_OpLess16U(v *Value, config *Config) bool { 11314 b := v.Block 11315 _ = b 11316 // match: (Less16U x y) 11317 // cond: 11318 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11319 for { 11320 x := v.Args[0] 11321 y := v.Args[1] 11322 v.reset(OpARM64LessThanU) 11323 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11324 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 11325 v1.AddArg(x) 11326 v0.AddArg(v1) 11327 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 11328 v2.AddArg(y) 11329 v0.AddArg(v2) 11330 v.AddArg(v0) 11331 return true 11332 } 11333 } 11334 func rewriteValueARM64_OpLess32(v *Value, config *Config) bool { 11335 b := v.Block 11336 _ = b 11337 // match: (Less32 x y) 11338 // cond: 11339 // result: (LessThan (CMPW x y)) 11340 for { 11341 x := v.Args[0] 11342 y := v.Args[1] 11343 v.reset(OpARM64LessThan) 11344 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11345 v0.AddArg(x) 11346 v0.AddArg(y) 11347 v.AddArg(v0) 11348 return true 11349 } 11350 } 11351 func rewriteValueARM64_OpLess32F(v *Value, config *Config) bool { 11352 b := v.Block 11353 _ = b 11354 // match: (Less32F x y) 11355 // cond: 11356 // result: (GreaterThan (FCMPS y x)) 11357 for { 11358 x := v.Args[0] 11359 y := v.Args[1] 11360 v.reset(OpARM64GreaterThan) 11361 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 11362 v0.AddArg(y) 11363 v0.AddArg(x) 11364 v.AddArg(v0) 11365 return true 11366 } 11367 } 11368 func rewriteValueARM64_OpLess32U(v *Value, config *Config) bool { 11369 b := v.Block 11370 _ = b 11371 // match: (Less32U x y) 11372 // cond: 11373 // result: (LessThanU (CMPW x y)) 11374 for { 11375 x := v.Args[0] 11376 y := v.Args[1] 11377 v.reset(OpARM64LessThanU) 11378 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11379 v0.AddArg(x) 11380 v0.AddArg(y) 11381 v.AddArg(v0) 11382 return true 11383 } 11384 } 11385 func rewriteValueARM64_OpLess64(v *Value, config *Config) bool { 11386 b := v.Block 11387 _ = b 11388 // match: (Less64 x y) 11389 // cond: 11390 // result: (LessThan (CMP x y)) 11391 for { 11392 x := v.Args[0] 11393 y := v.Args[1] 11394 v.reset(OpARM64LessThan) 11395 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11396 v0.AddArg(x) 11397 v0.AddArg(y) 11398 v.AddArg(v0) 11399 return true 11400 } 11401 } 11402 func rewriteValueARM64_OpLess64F(v *Value, config *Config) bool { 11403 b := v.Block 11404 _ = b 11405 // match: (Less64F x y) 11406 // cond: 11407 // result: (GreaterThan (FCMPD y x)) 11408 for { 11409 x := v.Args[0] 11410 y := v.Args[1] 11411 v.reset(OpARM64GreaterThan) 11412 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 11413 v0.AddArg(y) 11414 v0.AddArg(x) 11415 v.AddArg(v0) 11416 return true 11417 } 11418 } 11419 func rewriteValueARM64_OpLess64U(v *Value, config *Config) bool { 11420 b := v.Block 11421 _ = b 11422 // match: (Less64U x y) 11423 // cond: 11424 // result: (LessThanU (CMP x y)) 11425 for { 11426 x := v.Args[0] 11427 y := v.Args[1] 11428 v.reset(OpARM64LessThanU) 11429 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11430 v0.AddArg(x) 11431 v0.AddArg(y) 11432 v.AddArg(v0) 11433 return true 11434 } 11435 } 11436 func rewriteValueARM64_OpLess8(v *Value, config *Config) bool { 11437 b := v.Block 11438 _ = b 11439 // match: (Less8 x y) 11440 // cond: 11441 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11442 for { 11443 x := v.Args[0] 11444 y := v.Args[1] 11445 v.reset(OpARM64LessThan) 11446 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11447 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11448 v1.AddArg(x) 11449 v0.AddArg(v1) 11450 v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 11451 v2.AddArg(y) 11452 v0.AddArg(v2) 11453 v.AddArg(v0) 11454 return true 11455 } 11456 } 11457 func rewriteValueARM64_OpLess8U(v *Value, config *Config) bool { 11458 b := v.Block 11459 _ = b 11460 // match: (Less8U x y) 11461 // cond: 11462 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11463 for { 11464 x := v.Args[0] 11465 y := v.Args[1] 11466 v.reset(OpARM64LessThanU) 11467 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11468 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11469 v1.AddArg(x) 11470 v0.AddArg(v1) 11471 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 11472 v2.AddArg(y) 11473 v0.AddArg(v2) 11474 v.AddArg(v0) 11475 return true 11476 } 11477 } 11478 func rewriteValueARM64_OpLoad(v *Value, config *Config) bool { 11479 b := v.Block 11480 _ = b 11481 // match: (Load <t> ptr mem) 11482 // cond: t.IsBoolean() 11483 // result: (MOVBUload ptr mem) 11484 for { 11485 t := v.Type 11486 ptr := v.Args[0] 11487 mem := v.Args[1] 11488 if !(t.IsBoolean()) { 11489 break 11490 } 11491 v.reset(OpARM64MOVBUload) 11492 v.AddArg(ptr) 11493 v.AddArg(mem) 11494 return true 11495 } 11496 // match: (Load <t> ptr mem) 11497 // cond: (is8BitInt(t) && isSigned(t)) 11498 // result: (MOVBload ptr mem) 11499 for { 11500 t := v.Type 11501 ptr := v.Args[0] 11502 mem := v.Args[1] 11503 if !(is8BitInt(t) && isSigned(t)) { 11504 break 11505 } 11506 v.reset(OpARM64MOVBload) 11507 v.AddArg(ptr) 11508 v.AddArg(mem) 11509 return true 11510 } 11511 // match: (Load <t> ptr mem) 11512 // cond: (is8BitInt(t) && !isSigned(t)) 11513 // result: (MOVBUload ptr mem) 11514 for { 11515 t := v.Type 11516 ptr := v.Args[0] 11517 mem := v.Args[1] 11518 if !(is8BitInt(t) && !isSigned(t)) { 11519 break 11520 } 11521 v.reset(OpARM64MOVBUload) 11522 v.AddArg(ptr) 11523 v.AddArg(mem) 11524 return true 11525 } 11526 // match: (Load <t> ptr mem) 11527 // cond: (is16BitInt(t) && isSigned(t)) 11528 // result: (MOVHload ptr mem) 11529 for { 11530 t := v.Type 11531 ptr := v.Args[0] 11532 mem := v.Args[1] 11533 if !(is16BitInt(t) && isSigned(t)) { 11534 break 11535 } 11536 v.reset(OpARM64MOVHload) 11537 v.AddArg(ptr) 11538 v.AddArg(mem) 11539 return true 11540 } 11541 // match: (Load <t> ptr mem) 11542 // cond: (is16BitInt(t) && !isSigned(t)) 11543 // result: (MOVHUload ptr mem) 11544 for { 11545 t := v.Type 11546 ptr := v.Args[0] 11547 mem := v.Args[1] 11548 if !(is16BitInt(t) && !isSigned(t)) { 11549 break 11550 } 11551 v.reset(OpARM64MOVHUload) 11552 v.AddArg(ptr) 11553 v.AddArg(mem) 11554 return true 11555 } 11556 // match: (Load <t> ptr mem) 11557 // cond: (is32BitInt(t) && isSigned(t)) 11558 // result: (MOVWload ptr mem) 11559 for { 11560 t := v.Type 11561 ptr := v.Args[0] 11562 mem := v.Args[1] 11563 if !(is32BitInt(t) && isSigned(t)) { 11564 break 11565 } 11566 v.reset(OpARM64MOVWload) 11567 v.AddArg(ptr) 11568 v.AddArg(mem) 11569 return true 11570 } 11571 // match: (Load <t> ptr mem) 11572 // cond: (is32BitInt(t) && !isSigned(t)) 11573 // result: (MOVWUload ptr mem) 11574 for { 11575 t := v.Type 11576 ptr := v.Args[0] 11577 mem := v.Args[1] 11578 if !(is32BitInt(t) && !isSigned(t)) { 11579 break 11580 } 11581 v.reset(OpARM64MOVWUload) 11582 v.AddArg(ptr) 11583 v.AddArg(mem) 11584 return true 11585 } 11586 // match: (Load <t> ptr mem) 11587 // cond: (is64BitInt(t) || isPtr(t)) 11588 // result: (MOVDload ptr mem) 11589 for { 11590 t := v.Type 11591 ptr := v.Args[0] 11592 mem := v.Args[1] 11593 if !(is64BitInt(t) || isPtr(t)) { 11594 break 11595 } 11596 v.reset(OpARM64MOVDload) 11597 v.AddArg(ptr) 11598 v.AddArg(mem) 11599 return true 11600 } 11601 // match: (Load <t> ptr mem) 11602 // cond: is32BitFloat(t) 11603 // result: (FMOVSload ptr mem) 11604 for { 11605 t := v.Type 11606 ptr := v.Args[0] 11607 mem := v.Args[1] 11608 if !(is32BitFloat(t)) { 11609 break 11610 } 11611 v.reset(OpARM64FMOVSload) 11612 v.AddArg(ptr) 11613 v.AddArg(mem) 11614 return true 11615 } 11616 // match: (Load <t> ptr mem) 11617 // cond: is64BitFloat(t) 11618 // result: (FMOVDload ptr mem) 11619 for { 11620 t := v.Type 11621 ptr := v.Args[0] 11622 mem := v.Args[1] 11623 if !(is64BitFloat(t)) { 11624 break 11625 } 11626 v.reset(OpARM64FMOVDload) 11627 v.AddArg(ptr) 11628 v.AddArg(mem) 11629 return true 11630 } 11631 return false 11632 } 11633 func rewriteValueARM64_OpLsh16x16(v *Value, config *Config) bool { 11634 b := v.Block 11635 _ = b 11636 // match: (Lsh16x16 <t> x y) 11637 // cond: 11638 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 11639 for { 11640 t := v.Type 11641 x := v.Args[0] 11642 y := v.Args[1] 11643 v.reset(OpARM64CSELULT) 11644 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11645 v0.AddArg(x) 11646 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11647 v1.AddArg(y) 11648 v0.AddArg(v1) 11649 v.AddArg(v0) 11650 v2 := b.NewValue0(v.Pos, OpConst64, t) 11651 v2.AuxInt = 0 11652 v.AddArg(v2) 11653 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11654 v3.AuxInt = 64 11655 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11656 v4.AddArg(y) 11657 v3.AddArg(v4) 11658 v.AddArg(v3) 11659 return true 11660 } 11661 } 11662 func rewriteValueARM64_OpLsh16x32(v *Value, config *Config) bool { 11663 b := v.Block 11664 _ = b 11665 // match: (Lsh16x32 <t> x y) 11666 // cond: 11667 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 11668 for { 11669 t := v.Type 11670 x := v.Args[0] 11671 y := v.Args[1] 11672 v.reset(OpARM64CSELULT) 11673 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11674 v0.AddArg(x) 11675 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11676 v1.AddArg(y) 11677 v0.AddArg(v1) 11678 v.AddArg(v0) 11679 v2 := b.NewValue0(v.Pos, OpConst64, t) 11680 v2.AuxInt = 0 11681 v.AddArg(v2) 11682 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11683 v3.AuxInt = 64 11684 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11685 v4.AddArg(y) 11686 v3.AddArg(v4) 11687 v.AddArg(v3) 11688 return true 11689 } 11690 } 11691 func rewriteValueARM64_OpLsh16x64(v *Value, config *Config) bool { 11692 b := v.Block 11693 _ = b 11694 // match: (Lsh16x64 x (MOVDconst [c])) 11695 // cond: uint64(c) < 16 11696 // result: (SLLconst x [c]) 11697 for { 11698 x := v.Args[0] 11699 v_1 := v.Args[1] 11700 if v_1.Op != OpARM64MOVDconst { 11701 break 11702 } 11703 c := v_1.AuxInt 11704 if !(uint64(c) < 16) { 11705 break 11706 } 11707 v.reset(OpARM64SLLconst) 11708 v.AuxInt = c 11709 v.AddArg(x) 11710 return true 11711 } 11712 // match: (Lsh16x64 _ (MOVDconst [c])) 11713 // cond: uint64(c) >= 16 11714 // result: (MOVDconst [0]) 11715 for { 11716 v_1 := v.Args[1] 11717 if v_1.Op != OpARM64MOVDconst { 11718 break 11719 } 11720 c := v_1.AuxInt 11721 if !(uint64(c) >= 16) { 11722 break 11723 } 11724 v.reset(OpARM64MOVDconst) 11725 v.AuxInt = 0 11726 return true 11727 } 11728 // match: (Lsh16x64 <t> x y) 11729 // cond: 11730 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 11731 for { 11732 t := v.Type 11733 x := v.Args[0] 11734 y := v.Args[1] 11735 v.reset(OpARM64CSELULT) 11736 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11737 v0.AddArg(x) 11738 v0.AddArg(y) 11739 v.AddArg(v0) 11740 v1 := b.NewValue0(v.Pos, OpConst64, t) 11741 v1.AuxInt = 0 11742 v.AddArg(v1) 11743 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11744 v2.AuxInt = 64 11745 v2.AddArg(y) 11746 v.AddArg(v2) 11747 return true 11748 } 11749 } 11750 func rewriteValueARM64_OpLsh16x8(v *Value, config *Config) bool { 11751 b := v.Block 11752 _ = b 11753 // match: (Lsh16x8 <t> x y) 11754 // cond: 11755 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 11756 for { 11757 t := v.Type 11758 x := v.Args[0] 11759 y := v.Args[1] 11760 v.reset(OpARM64CSELULT) 11761 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11762 v0.AddArg(x) 11763 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 11764 v1.AddArg(y) 11765 v0.AddArg(v1) 11766 v.AddArg(v0) 11767 v2 := b.NewValue0(v.Pos, OpConst64, t) 11768 v2.AuxInt = 0 11769 v.AddArg(v2) 11770 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11771 v3.AuxInt = 64 11772 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 11773 v4.AddArg(y) 11774 v3.AddArg(v4) 11775 v.AddArg(v3) 11776 return true 11777 } 11778 } 11779 func rewriteValueARM64_OpLsh32x16(v *Value, config *Config) bool { 11780 b := v.Block 11781 _ = b 11782 // match: (Lsh32x16 <t> x y) 11783 // cond: 11784 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 11785 for { 11786 t := v.Type 11787 x := v.Args[0] 11788 y := v.Args[1] 11789 v.reset(OpARM64CSELULT) 11790 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11791 v0.AddArg(x) 11792 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11793 v1.AddArg(y) 11794 v0.AddArg(v1) 11795 v.AddArg(v0) 11796 v2 := b.NewValue0(v.Pos, OpConst64, t) 11797 v2.AuxInt = 0 11798 v.AddArg(v2) 11799 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11800 v3.AuxInt = 64 11801 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11802 v4.AddArg(y) 11803 v3.AddArg(v4) 11804 v.AddArg(v3) 11805 return true 11806 } 11807 } 11808 func rewriteValueARM64_OpLsh32x32(v *Value, config *Config) bool { 11809 b := v.Block 11810 _ = b 11811 // match: (Lsh32x32 <t> x y) 11812 // cond: 11813 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 11814 for { 11815 t := v.Type 11816 x := v.Args[0] 11817 y := v.Args[1] 11818 v.reset(OpARM64CSELULT) 11819 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11820 v0.AddArg(x) 11821 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11822 v1.AddArg(y) 11823 v0.AddArg(v1) 11824 v.AddArg(v0) 11825 v2 := b.NewValue0(v.Pos, OpConst64, t) 11826 v2.AuxInt = 0 11827 v.AddArg(v2) 11828 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11829 v3.AuxInt = 64 11830 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11831 v4.AddArg(y) 11832 v3.AddArg(v4) 11833 v.AddArg(v3) 11834 return true 11835 } 11836 } 11837 func rewriteValueARM64_OpLsh32x64(v *Value, config *Config) bool { 11838 b := v.Block 11839 _ = b 11840 // match: (Lsh32x64 x (MOVDconst [c])) 11841 // cond: uint64(c) < 32 11842 // result: (SLLconst x [c]) 11843 for { 11844 x := v.Args[0] 11845 v_1 := v.Args[1] 11846 if v_1.Op != OpARM64MOVDconst { 11847 break 11848 } 11849 c := v_1.AuxInt 11850 if !(uint64(c) < 32) { 11851 break 11852 } 11853 v.reset(OpARM64SLLconst) 11854 v.AuxInt = c 11855 v.AddArg(x) 11856 return true 11857 } 11858 // match: (Lsh32x64 _ (MOVDconst [c])) 11859 // cond: uint64(c) >= 32 11860 // result: (MOVDconst [0]) 11861 for { 11862 v_1 := v.Args[1] 11863 if v_1.Op != OpARM64MOVDconst { 11864 break 11865 } 11866 c := v_1.AuxInt 11867 if !(uint64(c) >= 32) { 11868 break 11869 } 11870 v.reset(OpARM64MOVDconst) 11871 v.AuxInt = 0 11872 return true 11873 } 11874 // match: (Lsh32x64 <t> x y) 11875 // cond: 11876 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 11877 for { 11878 t := v.Type 11879 x := v.Args[0] 11880 y := v.Args[1] 11881 v.reset(OpARM64CSELULT) 11882 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11883 v0.AddArg(x) 11884 v0.AddArg(y) 11885 v.AddArg(v0) 11886 v1 := b.NewValue0(v.Pos, OpConst64, t) 11887 v1.AuxInt = 0 11888 v.AddArg(v1) 11889 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11890 v2.AuxInt = 64 11891 v2.AddArg(y) 11892 v.AddArg(v2) 11893 return true 11894 } 11895 } 11896 func rewriteValueARM64_OpLsh32x8(v *Value, config *Config) bool { 11897 b := v.Block 11898 _ = b 11899 // match: (Lsh32x8 <t> x y) 11900 // cond: 11901 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 11902 for { 11903 t := v.Type 11904 x := v.Args[0] 11905 y := v.Args[1] 11906 v.reset(OpARM64CSELULT) 11907 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11908 v0.AddArg(x) 11909 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 11910 v1.AddArg(y) 11911 v0.AddArg(v1) 11912 v.AddArg(v0) 11913 v2 := b.NewValue0(v.Pos, OpConst64, t) 11914 v2.AuxInt = 0 11915 v.AddArg(v2) 11916 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11917 v3.AuxInt = 64 11918 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 11919 v4.AddArg(y) 11920 v3.AddArg(v4) 11921 v.AddArg(v3) 11922 return true 11923 } 11924 } 11925 func rewriteValueARM64_OpLsh64x16(v *Value, config *Config) bool { 11926 b := v.Block 11927 _ = b 11928 // match: (Lsh64x16 <t> x y) 11929 // cond: 11930 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 11931 for { 11932 t := v.Type 11933 x := v.Args[0] 11934 y := v.Args[1] 11935 v.reset(OpARM64CSELULT) 11936 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11937 v0.AddArg(x) 11938 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11939 v1.AddArg(y) 11940 v0.AddArg(v1) 11941 v.AddArg(v0) 11942 v2 := b.NewValue0(v.Pos, OpConst64, t) 11943 v2.AuxInt = 0 11944 v.AddArg(v2) 11945 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11946 v3.AuxInt = 64 11947 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 11948 v4.AddArg(y) 11949 v3.AddArg(v4) 11950 v.AddArg(v3) 11951 return true 11952 } 11953 } 11954 func rewriteValueARM64_OpLsh64x32(v *Value, config *Config) bool { 11955 b := v.Block 11956 _ = b 11957 // match: (Lsh64x32 <t> x y) 11958 // cond: 11959 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 11960 for { 11961 t := v.Type 11962 x := v.Args[0] 11963 y := v.Args[1] 11964 v.reset(OpARM64CSELULT) 11965 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 11966 v0.AddArg(x) 11967 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11968 v1.AddArg(y) 11969 v0.AddArg(v1) 11970 v.AddArg(v0) 11971 v2 := b.NewValue0(v.Pos, OpConst64, t) 11972 v2.AuxInt = 0 11973 v.AddArg(v2) 11974 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 11975 v3.AuxInt = 64 11976 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 11977 v4.AddArg(y) 11978 v3.AddArg(v4) 11979 v.AddArg(v3) 11980 return true 11981 } 11982 } 11983 func rewriteValueARM64_OpLsh64x64(v *Value, config *Config) bool { 11984 b := v.Block 11985 _ = b 11986 // match: (Lsh64x64 x (MOVDconst [c])) 11987 // cond: uint64(c) < 64 11988 // result: (SLLconst x [c]) 11989 for { 11990 x := v.Args[0] 11991 v_1 := v.Args[1] 11992 if v_1.Op != OpARM64MOVDconst { 11993 break 11994 } 11995 c := v_1.AuxInt 11996 if !(uint64(c) < 64) { 11997 break 11998 } 11999 v.reset(OpARM64SLLconst) 12000 v.AuxInt = c 12001 v.AddArg(x) 12002 return true 12003 } 12004 // match: (Lsh64x64 _ (MOVDconst [c])) 12005 // cond: uint64(c) >= 64 12006 // result: (MOVDconst [0]) 12007 for { 12008 v_1 := v.Args[1] 12009 if v_1.Op != OpARM64MOVDconst { 12010 break 12011 } 12012 c := v_1.AuxInt 12013 if !(uint64(c) >= 64) { 12014 break 12015 } 12016 v.reset(OpARM64MOVDconst) 12017 v.AuxInt = 0 12018 return true 12019 } 12020 // match: (Lsh64x64 <t> x y) 12021 // cond: 12022 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 12023 for { 12024 t := v.Type 12025 x := v.Args[0] 12026 y := v.Args[1] 12027 v.reset(OpARM64CSELULT) 12028 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12029 v0.AddArg(x) 12030 v0.AddArg(y) 12031 v.AddArg(v0) 12032 v1 := b.NewValue0(v.Pos, OpConst64, t) 12033 v1.AuxInt = 0 12034 v.AddArg(v1) 12035 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12036 v2.AuxInt = 64 12037 v2.AddArg(y) 12038 v.AddArg(v2) 12039 return true 12040 } 12041 } 12042 func rewriteValueARM64_OpLsh64x8(v *Value, config *Config) bool { 12043 b := v.Block 12044 _ = b 12045 // match: (Lsh64x8 <t> x y) 12046 // cond: 12047 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12048 for { 12049 t := v.Type 12050 x := v.Args[0] 12051 y := v.Args[1] 12052 v.reset(OpARM64CSELULT) 12053 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12054 v0.AddArg(x) 12055 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 12056 v1.AddArg(y) 12057 v0.AddArg(v1) 12058 v.AddArg(v0) 12059 v2 := b.NewValue0(v.Pos, OpConst64, t) 12060 v2.AuxInt = 0 12061 v.AddArg(v2) 12062 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12063 v3.AuxInt = 64 12064 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 12065 v4.AddArg(y) 12066 v3.AddArg(v4) 12067 v.AddArg(v3) 12068 return true 12069 } 12070 } 12071 func rewriteValueARM64_OpLsh8x16(v *Value, config *Config) bool { 12072 b := v.Block 12073 _ = b 12074 // match: (Lsh8x16 <t> x y) 12075 // cond: 12076 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12077 for { 12078 t := v.Type 12079 x := v.Args[0] 12080 y := v.Args[1] 12081 v.reset(OpARM64CSELULT) 12082 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12083 v0.AddArg(x) 12084 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 12085 v1.AddArg(y) 12086 v0.AddArg(v1) 12087 v.AddArg(v0) 12088 v2 := b.NewValue0(v.Pos, OpConst64, t) 12089 v2.AuxInt = 0 12090 v.AddArg(v2) 12091 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12092 v3.AuxInt = 64 12093 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 12094 v4.AddArg(y) 12095 v3.AddArg(v4) 12096 v.AddArg(v3) 12097 return true 12098 } 12099 } 12100 func rewriteValueARM64_OpLsh8x32(v *Value, config *Config) bool { 12101 b := v.Block 12102 _ = b 12103 // match: (Lsh8x32 <t> x y) 12104 // cond: 12105 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12106 for { 12107 t := v.Type 12108 x := v.Args[0] 12109 y := v.Args[1] 12110 v.reset(OpARM64CSELULT) 12111 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12112 v0.AddArg(x) 12113 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 12114 v1.AddArg(y) 12115 v0.AddArg(v1) 12116 v.AddArg(v0) 12117 v2 := b.NewValue0(v.Pos, OpConst64, t) 12118 v2.AuxInt = 0 12119 v.AddArg(v2) 12120 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12121 v3.AuxInt = 64 12122 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 12123 v4.AddArg(y) 12124 v3.AddArg(v4) 12125 v.AddArg(v3) 12126 return true 12127 } 12128 } 12129 func rewriteValueARM64_OpLsh8x64(v *Value, config *Config) bool { 12130 b := v.Block 12131 _ = b 12132 // match: (Lsh8x64 x (MOVDconst [c])) 12133 // cond: uint64(c) < 8 12134 // result: (SLLconst x [c]) 12135 for { 12136 x := v.Args[0] 12137 v_1 := v.Args[1] 12138 if v_1.Op != OpARM64MOVDconst { 12139 break 12140 } 12141 c := v_1.AuxInt 12142 if !(uint64(c) < 8) { 12143 break 12144 } 12145 v.reset(OpARM64SLLconst) 12146 v.AuxInt = c 12147 v.AddArg(x) 12148 return true 12149 } 12150 // match: (Lsh8x64 _ (MOVDconst [c])) 12151 // cond: uint64(c) >= 8 12152 // result: (MOVDconst [0]) 12153 for { 12154 v_1 := v.Args[1] 12155 if v_1.Op != OpARM64MOVDconst { 12156 break 12157 } 12158 c := v_1.AuxInt 12159 if !(uint64(c) >= 8) { 12160 break 12161 } 12162 v.reset(OpARM64MOVDconst) 12163 v.AuxInt = 0 12164 return true 12165 } 12166 // match: (Lsh8x64 <t> x y) 12167 // cond: 12168 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 12169 for { 12170 t := v.Type 12171 x := v.Args[0] 12172 y := v.Args[1] 12173 v.reset(OpARM64CSELULT) 12174 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12175 v0.AddArg(x) 12176 v0.AddArg(y) 12177 v.AddArg(v0) 12178 v1 := b.NewValue0(v.Pos, OpConst64, t) 12179 v1.AuxInt = 0 12180 v.AddArg(v1) 12181 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12182 v2.AuxInt = 64 12183 v2.AddArg(y) 12184 v.AddArg(v2) 12185 return true 12186 } 12187 } 12188 func rewriteValueARM64_OpLsh8x8(v *Value, config *Config) bool { 12189 b := v.Block 12190 _ = b 12191 // match: (Lsh8x8 <t> x y) 12192 // cond: 12193 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12194 for { 12195 t := v.Type 12196 x := v.Args[0] 12197 y := v.Args[1] 12198 v.reset(OpARM64CSELULT) 12199 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12200 v0.AddArg(x) 12201 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 12202 v1.AddArg(y) 12203 v0.AddArg(v1) 12204 v.AddArg(v0) 12205 v2 := b.NewValue0(v.Pos, OpConst64, t) 12206 v2.AuxInt = 0 12207 v.AddArg(v2) 12208 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12209 v3.AuxInt = 64 12210 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 12211 v4.AddArg(y) 12212 v3.AddArg(v4) 12213 v.AddArg(v3) 12214 return true 12215 } 12216 } 12217 func rewriteValueARM64_OpMod16(v *Value, config *Config) bool { 12218 b := v.Block 12219 _ = b 12220 // match: (Mod16 x y) 12221 // cond: 12222 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 12223 for { 12224 x := v.Args[0] 12225 y := v.Args[1] 12226 v.reset(OpARM64MODW) 12227 v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 12228 v0.AddArg(x) 12229 v.AddArg(v0) 12230 v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32()) 12231 v1.AddArg(y) 12232 v.AddArg(v1) 12233 return true 12234 } 12235 } 12236 func rewriteValueARM64_OpMod16u(v *Value, config *Config) bool { 12237 b := v.Block 12238 _ = b 12239 // match: (Mod16u x y) 12240 // cond: 12241 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 12242 for { 12243 x := v.Args[0] 12244 y := v.Args[1] 12245 v.reset(OpARM64UMODW) 12246 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 12247 v0.AddArg(x) 12248 v.AddArg(v0) 12249 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 12250 v1.AddArg(y) 12251 v.AddArg(v1) 12252 return true 12253 } 12254 } 12255 func rewriteValueARM64_OpMod32(v *Value, config *Config) bool { 12256 b := v.Block 12257 _ = b 12258 // match: (Mod32 x y) 12259 // cond: 12260 // result: (MODW x y) 12261 for { 12262 x := v.Args[0] 12263 y := v.Args[1] 12264 v.reset(OpARM64MODW) 12265 v.AddArg(x) 12266 v.AddArg(y) 12267 return true 12268 } 12269 } 12270 func rewriteValueARM64_OpMod32u(v *Value, config *Config) bool { 12271 b := v.Block 12272 _ = b 12273 // match: (Mod32u x y) 12274 // cond: 12275 // result: (UMODW x y) 12276 for { 12277 x := v.Args[0] 12278 y := v.Args[1] 12279 v.reset(OpARM64UMODW) 12280 v.AddArg(x) 12281 v.AddArg(y) 12282 return true 12283 } 12284 } 12285 func rewriteValueARM64_OpMod64(v *Value, config *Config) bool { 12286 b := v.Block 12287 _ = b 12288 // match: (Mod64 x y) 12289 // cond: 12290 // result: (MOD x y) 12291 for { 12292 x := v.Args[0] 12293 y := v.Args[1] 12294 v.reset(OpARM64MOD) 12295 v.AddArg(x) 12296 v.AddArg(y) 12297 return true 12298 } 12299 } 12300 func rewriteValueARM64_OpMod64u(v *Value, config *Config) bool { 12301 b := v.Block 12302 _ = b 12303 // match: (Mod64u x y) 12304 // cond: 12305 // result: (UMOD x y) 12306 for { 12307 x := v.Args[0] 12308 y := v.Args[1] 12309 v.reset(OpARM64UMOD) 12310 v.AddArg(x) 12311 v.AddArg(y) 12312 return true 12313 } 12314 } 12315 func rewriteValueARM64_OpMod8(v *Value, config *Config) bool { 12316 b := v.Block 12317 _ = b 12318 // match: (Mod8 x y) 12319 // cond: 12320 // result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 12321 for { 12322 x := v.Args[0] 12323 y := v.Args[1] 12324 v.reset(OpARM64MODW) 12325 v0 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 12326 v0.AddArg(x) 12327 v.AddArg(v0) 12328 v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32()) 12329 v1.AddArg(y) 12330 v.AddArg(v1) 12331 return true 12332 } 12333 } 12334 func rewriteValueARM64_OpMod8u(v *Value, config *Config) bool { 12335 b := v.Block 12336 _ = b 12337 // match: (Mod8u x y) 12338 // cond: 12339 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 12340 for { 12341 x := v.Args[0] 12342 y := v.Args[1] 12343 v.reset(OpARM64UMODW) 12344 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 12345 v0.AddArg(x) 12346 v.AddArg(v0) 12347 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 12348 v1.AddArg(y) 12349 v.AddArg(v1) 12350 return true 12351 } 12352 } 12353 func rewriteValueARM64_OpMove(v *Value, config *Config) bool { 12354 b := v.Block 12355 _ = b 12356 // match: (Move [s] _ _ mem) 12357 // cond: SizeAndAlign(s).Size() == 0 12358 // result: mem 12359 for { 12360 s := v.AuxInt 12361 mem := v.Args[2] 12362 if !(SizeAndAlign(s).Size() == 0) { 12363 break 12364 } 12365 v.reset(OpCopy) 12366 v.Type = mem.Type 12367 v.AddArg(mem) 12368 return true 12369 } 12370 // match: (Move [s] dst src mem) 12371 // cond: SizeAndAlign(s).Size() == 1 12372 // result: (MOVBstore dst (MOVBUload src mem) mem) 12373 for { 12374 s := v.AuxInt 12375 dst := v.Args[0] 12376 src := v.Args[1] 12377 mem := v.Args[2] 12378 if !(SizeAndAlign(s).Size() == 1) { 12379 break 12380 } 12381 v.reset(OpARM64MOVBstore) 12382 v.AddArg(dst) 12383 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8()) 12384 v0.AddArg(src) 12385 v0.AddArg(mem) 12386 v.AddArg(v0) 12387 v.AddArg(mem) 12388 return true 12389 } 12390 // match: (Move [s] dst src mem) 12391 // cond: SizeAndAlign(s).Size() == 2 12392 // result: (MOVHstore dst (MOVHUload src mem) mem) 12393 for { 12394 s := v.AuxInt 12395 dst := v.Args[0] 12396 src := v.Args[1] 12397 mem := v.Args[2] 12398 if !(SizeAndAlign(s).Size() == 2) { 12399 break 12400 } 12401 v.reset(OpARM64MOVHstore) 12402 v.AddArg(dst) 12403 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16()) 12404 v0.AddArg(src) 12405 v0.AddArg(mem) 12406 v.AddArg(v0) 12407 v.AddArg(mem) 12408 return true 12409 } 12410 // match: (Move [s] dst src mem) 12411 // cond: SizeAndAlign(s).Size() == 4 12412 // result: (MOVWstore dst (MOVWUload src mem) mem) 12413 for { 12414 s := v.AuxInt 12415 dst := v.Args[0] 12416 src := v.Args[1] 12417 mem := v.Args[2] 12418 if !(SizeAndAlign(s).Size() == 4) { 12419 break 12420 } 12421 v.reset(OpARM64MOVWstore) 12422 v.AddArg(dst) 12423 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) 12424 v0.AddArg(src) 12425 v0.AddArg(mem) 12426 v.AddArg(v0) 12427 v.AddArg(mem) 12428 return true 12429 } 12430 // match: (Move [s] dst src mem) 12431 // cond: SizeAndAlign(s).Size() == 8 12432 // result: (MOVDstore dst (MOVDload src mem) mem) 12433 for { 12434 s := v.AuxInt 12435 dst := v.Args[0] 12436 src := v.Args[1] 12437 mem := v.Args[2] 12438 if !(SizeAndAlign(s).Size() == 8) { 12439 break 12440 } 12441 v.reset(OpARM64MOVDstore) 12442 v.AddArg(dst) 12443 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12444 v0.AddArg(src) 12445 v0.AddArg(mem) 12446 v.AddArg(v0) 12447 v.AddArg(mem) 12448 return true 12449 } 12450 // match: (Move [s] dst src mem) 12451 // cond: SizeAndAlign(s).Size() == 3 12452 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 12453 for { 12454 s := v.AuxInt 12455 dst := v.Args[0] 12456 src := v.Args[1] 12457 mem := v.Args[2] 12458 if !(SizeAndAlign(s).Size() == 3) { 12459 break 12460 } 12461 v.reset(OpARM64MOVBstore) 12462 v.AuxInt = 2 12463 v.AddArg(dst) 12464 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8()) 12465 v0.AuxInt = 2 12466 v0.AddArg(src) 12467 v0.AddArg(mem) 12468 v.AddArg(v0) 12469 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 12470 v1.AddArg(dst) 12471 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16()) 12472 v2.AddArg(src) 12473 v2.AddArg(mem) 12474 v1.AddArg(v2) 12475 v1.AddArg(mem) 12476 v.AddArg(v1) 12477 return true 12478 } 12479 // match: (Move [s] dst src mem) 12480 // cond: SizeAndAlign(s).Size() == 5 12481 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 12482 for { 12483 s := v.AuxInt 12484 dst := v.Args[0] 12485 src := v.Args[1] 12486 mem := v.Args[2] 12487 if !(SizeAndAlign(s).Size() == 5) { 12488 break 12489 } 12490 v.reset(OpARM64MOVBstore) 12491 v.AuxInt = 4 12492 v.AddArg(dst) 12493 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8()) 12494 v0.AuxInt = 4 12495 v0.AddArg(src) 12496 v0.AddArg(mem) 12497 v.AddArg(v0) 12498 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12499 v1.AddArg(dst) 12500 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) 12501 v2.AddArg(src) 12502 v2.AddArg(mem) 12503 v1.AddArg(v2) 12504 v1.AddArg(mem) 12505 v.AddArg(v1) 12506 return true 12507 } 12508 // match: (Move [s] dst src mem) 12509 // cond: SizeAndAlign(s).Size() == 6 12510 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 12511 for { 12512 s := v.AuxInt 12513 dst := v.Args[0] 12514 src := v.Args[1] 12515 mem := v.Args[2] 12516 if !(SizeAndAlign(s).Size() == 6) { 12517 break 12518 } 12519 v.reset(OpARM64MOVHstore) 12520 v.AuxInt = 4 12521 v.AddArg(dst) 12522 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16()) 12523 v0.AuxInt = 4 12524 v0.AddArg(src) 12525 v0.AddArg(mem) 12526 v.AddArg(v0) 12527 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12528 v1.AddArg(dst) 12529 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) 12530 v2.AddArg(src) 12531 v2.AddArg(mem) 12532 v1.AddArg(v2) 12533 v1.AddArg(mem) 12534 v.AddArg(v1) 12535 return true 12536 } 12537 // match: (Move [s] dst src mem) 12538 // cond: SizeAndAlign(s).Size() == 7 12539 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 12540 for { 12541 s := v.AuxInt 12542 dst := v.Args[0] 12543 src := v.Args[1] 12544 mem := v.Args[2] 12545 if !(SizeAndAlign(s).Size() == 7) { 12546 break 12547 } 12548 v.reset(OpARM64MOVBstore) 12549 v.AuxInt = 6 12550 v.AddArg(dst) 12551 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8()) 12552 v0.AuxInt = 6 12553 v0.AddArg(src) 12554 v0.AddArg(mem) 12555 v.AddArg(v0) 12556 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 12557 v1.AuxInt = 4 12558 v1.AddArg(dst) 12559 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16()) 12560 v2.AuxInt = 4 12561 v2.AddArg(src) 12562 v2.AddArg(mem) 12563 v1.AddArg(v2) 12564 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12565 v3.AddArg(dst) 12566 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) 12567 v4.AddArg(src) 12568 v4.AddArg(mem) 12569 v3.AddArg(v4) 12570 v3.AddArg(mem) 12571 v1.AddArg(v3) 12572 v.AddArg(v1) 12573 return true 12574 } 12575 // match: (Move [s] dst src mem) 12576 // cond: SizeAndAlign(s).Size() == 12 12577 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 12578 for { 12579 s := v.AuxInt 12580 dst := v.Args[0] 12581 src := v.Args[1] 12582 mem := v.Args[2] 12583 if !(SizeAndAlign(s).Size() == 12) { 12584 break 12585 } 12586 v.reset(OpARM64MOVWstore) 12587 v.AuxInt = 8 12588 v.AddArg(dst) 12589 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) 12590 v0.AuxInt = 8 12591 v0.AddArg(src) 12592 v0.AddArg(mem) 12593 v.AddArg(v0) 12594 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 12595 v1.AddArg(dst) 12596 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12597 v2.AddArg(src) 12598 v2.AddArg(mem) 12599 v1.AddArg(v2) 12600 v1.AddArg(mem) 12601 v.AddArg(v1) 12602 return true 12603 } 12604 // match: (Move [s] dst src mem) 12605 // cond: SizeAndAlign(s).Size() == 16 12606 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 12607 for { 12608 s := v.AuxInt 12609 dst := v.Args[0] 12610 src := v.Args[1] 12611 mem := v.Args[2] 12612 if !(SizeAndAlign(s).Size() == 16) { 12613 break 12614 } 12615 v.reset(OpARM64MOVDstore) 12616 v.AuxInt = 8 12617 v.AddArg(dst) 12618 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12619 v0.AuxInt = 8 12620 v0.AddArg(src) 12621 v0.AddArg(mem) 12622 v.AddArg(v0) 12623 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 12624 v1.AddArg(dst) 12625 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12626 v2.AddArg(src) 12627 v2.AddArg(mem) 12628 v1.AddArg(v2) 12629 v1.AddArg(mem) 12630 v.AddArg(v1) 12631 return true 12632 } 12633 // match: (Move [s] dst src mem) 12634 // cond: SizeAndAlign(s).Size() == 24 12635 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) 12636 for { 12637 s := v.AuxInt 12638 dst := v.Args[0] 12639 src := v.Args[1] 12640 mem := v.Args[2] 12641 if !(SizeAndAlign(s).Size() == 24) { 12642 break 12643 } 12644 v.reset(OpARM64MOVDstore) 12645 v.AuxInt = 16 12646 v.AddArg(dst) 12647 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12648 v0.AuxInt = 16 12649 v0.AddArg(src) 12650 v0.AddArg(mem) 12651 v.AddArg(v0) 12652 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 12653 v1.AuxInt = 8 12654 v1.AddArg(dst) 12655 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12656 v2.AuxInt = 8 12657 v2.AddArg(src) 12658 v2.AddArg(mem) 12659 v1.AddArg(v2) 12660 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 12661 v3.AddArg(dst) 12662 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) 12663 v4.AddArg(src) 12664 v4.AddArg(mem) 12665 v3.AddArg(v4) 12666 v3.AddArg(mem) 12667 v1.AddArg(v3) 12668 v.AddArg(v1) 12669 return true 12670 } 12671 // match: (Move [s] dst src mem) 12672 // cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 12673 // result: (Move [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] (OffPtr <dst.Type> dst [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (OffPtr <src.Type> src [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (Move [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] dst src mem)) 12674 for { 12675 s := v.AuxInt 12676 dst := v.Args[0] 12677 src := v.Args[1] 12678 mem := v.Args[2] 12679 if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) { 12680 break 12681 } 12682 v.reset(OpMove) 12683 v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64() 12684 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 12685 v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 12686 v0.AddArg(dst) 12687 v.AddArg(v0) 12688 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 12689 v1.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 12690 v1.AddArg(src) 12691 v.AddArg(v1) 12692 v2 := b.NewValue0(v.Pos, OpMove, TypeMem) 12693 v2.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64() 12694 v2.AddArg(dst) 12695 v2.AddArg(src) 12696 v2.AddArg(mem) 12697 v.AddArg(v2) 12698 return true 12699 } 12700 // match: (Move [s] dst src mem) 12701 // cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice 12702 // result: (DUFFCOPY [8 * (128 - int64(SizeAndAlign(s).Size()/8))] dst src mem) 12703 for { 12704 s := v.AuxInt 12705 dst := v.Args[0] 12706 src := v.Args[1] 12707 mem := v.Args[2] 12708 if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice) { 12709 break 12710 } 12711 v.reset(OpARM64DUFFCOPY) 12712 v.AuxInt = 8 * (128 - int64(SizeAndAlign(s).Size()/8)) 12713 v.AddArg(dst) 12714 v.AddArg(src) 12715 v.AddArg(mem) 12716 return true 12717 } 12718 // match: (Move [s] dst src mem) 12719 // cond: SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0 12720 // result: (LoweredMove dst src (ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) 12721 for { 12722 s := v.AuxInt 12723 dst := v.Args[0] 12724 src := v.Args[1] 12725 mem := v.Args[2] 12726 if !(SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0) { 12727 break 12728 } 12729 v.reset(OpARM64LoweredMove) 12730 v.AddArg(dst) 12731 v.AddArg(src) 12732 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 12733 v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) 12734 v0.AddArg(src) 12735 v.AddArg(v0) 12736 v.AddArg(mem) 12737 return true 12738 } 12739 return false 12740 } 12741 func rewriteValueARM64_OpMul16(v *Value, config *Config) bool { 12742 b := v.Block 12743 _ = b 12744 // match: (Mul16 x y) 12745 // cond: 12746 // result: (MULW x y) 12747 for { 12748 x := v.Args[0] 12749 y := v.Args[1] 12750 v.reset(OpARM64MULW) 12751 v.AddArg(x) 12752 v.AddArg(y) 12753 return true 12754 } 12755 } 12756 func rewriteValueARM64_OpMul32(v *Value, config *Config) bool { 12757 b := v.Block 12758 _ = b 12759 // match: (Mul32 x y) 12760 // cond: 12761 // result: (MULW x y) 12762 for { 12763 x := v.Args[0] 12764 y := v.Args[1] 12765 v.reset(OpARM64MULW) 12766 v.AddArg(x) 12767 v.AddArg(y) 12768 return true 12769 } 12770 } 12771 func rewriteValueARM64_OpMul32F(v *Value, config *Config) bool { 12772 b := v.Block 12773 _ = b 12774 // match: (Mul32F x y) 12775 // cond: 12776 // result: (FMULS x y) 12777 for { 12778 x := v.Args[0] 12779 y := v.Args[1] 12780 v.reset(OpARM64FMULS) 12781 v.AddArg(x) 12782 v.AddArg(y) 12783 return true 12784 } 12785 } 12786 func rewriteValueARM64_OpMul64(v *Value, config *Config) bool { 12787 b := v.Block 12788 _ = b 12789 // match: (Mul64 x y) 12790 // cond: 12791 // result: (MUL x y) 12792 for { 12793 x := v.Args[0] 12794 y := v.Args[1] 12795 v.reset(OpARM64MUL) 12796 v.AddArg(x) 12797 v.AddArg(y) 12798 return true 12799 } 12800 } 12801 func rewriteValueARM64_OpMul64F(v *Value, config *Config) bool { 12802 b := v.Block 12803 _ = b 12804 // match: (Mul64F x y) 12805 // cond: 12806 // result: (FMULD x y) 12807 for { 12808 x := v.Args[0] 12809 y := v.Args[1] 12810 v.reset(OpARM64FMULD) 12811 v.AddArg(x) 12812 v.AddArg(y) 12813 return true 12814 } 12815 } 12816 func rewriteValueARM64_OpMul8(v *Value, config *Config) bool { 12817 b := v.Block 12818 _ = b 12819 // match: (Mul8 x y) 12820 // cond: 12821 // result: (MULW x y) 12822 for { 12823 x := v.Args[0] 12824 y := v.Args[1] 12825 v.reset(OpARM64MULW) 12826 v.AddArg(x) 12827 v.AddArg(y) 12828 return true 12829 } 12830 } 12831 func rewriteValueARM64_OpNeg16(v *Value, config *Config) bool { 12832 b := v.Block 12833 _ = b 12834 // match: (Neg16 x) 12835 // cond: 12836 // result: (NEG x) 12837 for { 12838 x := v.Args[0] 12839 v.reset(OpARM64NEG) 12840 v.AddArg(x) 12841 return true 12842 } 12843 } 12844 func rewriteValueARM64_OpNeg32(v *Value, config *Config) bool { 12845 b := v.Block 12846 _ = b 12847 // match: (Neg32 x) 12848 // cond: 12849 // result: (NEG x) 12850 for { 12851 x := v.Args[0] 12852 v.reset(OpARM64NEG) 12853 v.AddArg(x) 12854 return true 12855 } 12856 } 12857 func rewriteValueARM64_OpNeg32F(v *Value, config *Config) bool { 12858 b := v.Block 12859 _ = b 12860 // match: (Neg32F x) 12861 // cond: 12862 // result: (FNEGS x) 12863 for { 12864 x := v.Args[0] 12865 v.reset(OpARM64FNEGS) 12866 v.AddArg(x) 12867 return true 12868 } 12869 } 12870 func rewriteValueARM64_OpNeg64(v *Value, config *Config) bool { 12871 b := v.Block 12872 _ = b 12873 // match: (Neg64 x) 12874 // cond: 12875 // result: (NEG x) 12876 for { 12877 x := v.Args[0] 12878 v.reset(OpARM64NEG) 12879 v.AddArg(x) 12880 return true 12881 } 12882 } 12883 func rewriteValueARM64_OpNeg64F(v *Value, config *Config) bool { 12884 b := v.Block 12885 _ = b 12886 // match: (Neg64F x) 12887 // cond: 12888 // result: (FNEGD x) 12889 for { 12890 x := v.Args[0] 12891 v.reset(OpARM64FNEGD) 12892 v.AddArg(x) 12893 return true 12894 } 12895 } 12896 func rewriteValueARM64_OpNeg8(v *Value, config *Config) bool { 12897 b := v.Block 12898 _ = b 12899 // match: (Neg8 x) 12900 // cond: 12901 // result: (NEG x) 12902 for { 12903 x := v.Args[0] 12904 v.reset(OpARM64NEG) 12905 v.AddArg(x) 12906 return true 12907 } 12908 } 12909 func rewriteValueARM64_OpNeq16(v *Value, config *Config) bool { 12910 b := v.Block 12911 _ = b 12912 // match: (Neq16 x y) 12913 // cond: 12914 // result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 12915 for { 12916 x := v.Args[0] 12917 y := v.Args[1] 12918 v.reset(OpARM64NotEqual) 12919 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 12920 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 12921 v1.AddArg(x) 12922 v0.AddArg(v1) 12923 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32()) 12924 v2.AddArg(y) 12925 v0.AddArg(v2) 12926 v.AddArg(v0) 12927 return true 12928 } 12929 } 12930 func rewriteValueARM64_OpNeq32(v *Value, config *Config) bool { 12931 b := v.Block 12932 _ = b 12933 // match: (Neq32 x y) 12934 // cond: 12935 // result: (NotEqual (CMPW x y)) 12936 for { 12937 x := v.Args[0] 12938 y := v.Args[1] 12939 v.reset(OpARM64NotEqual) 12940 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 12941 v0.AddArg(x) 12942 v0.AddArg(y) 12943 v.AddArg(v0) 12944 return true 12945 } 12946 } 12947 func rewriteValueARM64_OpNeq32F(v *Value, config *Config) bool { 12948 b := v.Block 12949 _ = b 12950 // match: (Neq32F x y) 12951 // cond: 12952 // result: (NotEqual (FCMPS x y)) 12953 for { 12954 x := v.Args[0] 12955 y := v.Args[1] 12956 v.reset(OpARM64NotEqual) 12957 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 12958 v0.AddArg(x) 12959 v0.AddArg(y) 12960 v.AddArg(v0) 12961 return true 12962 } 12963 } 12964 func rewriteValueARM64_OpNeq64(v *Value, config *Config) bool { 12965 b := v.Block 12966 _ = b 12967 // match: (Neq64 x y) 12968 // cond: 12969 // result: (NotEqual (CMP x y)) 12970 for { 12971 x := v.Args[0] 12972 y := v.Args[1] 12973 v.reset(OpARM64NotEqual) 12974 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 12975 v0.AddArg(x) 12976 v0.AddArg(y) 12977 v.AddArg(v0) 12978 return true 12979 } 12980 } 12981 func rewriteValueARM64_OpNeq64F(v *Value, config *Config) bool { 12982 b := v.Block 12983 _ = b 12984 // match: (Neq64F x y) 12985 // cond: 12986 // result: (NotEqual (FCMPD x y)) 12987 for { 12988 x := v.Args[0] 12989 y := v.Args[1] 12990 v.reset(OpARM64NotEqual) 12991 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 12992 v0.AddArg(x) 12993 v0.AddArg(y) 12994 v.AddArg(v0) 12995 return true 12996 } 12997 } 12998 func rewriteValueARM64_OpNeq8(v *Value, config *Config) bool { 12999 b := v.Block 13000 _ = b 13001 // match: (Neq8 x y) 13002 // cond: 13003 // result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 13004 for { 13005 x := v.Args[0] 13006 y := v.Args[1] 13007 v.reset(OpARM64NotEqual) 13008 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 13009 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 13010 v1.AddArg(x) 13011 v0.AddArg(v1) 13012 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32()) 13013 v2.AddArg(y) 13014 v0.AddArg(v2) 13015 v.AddArg(v0) 13016 return true 13017 } 13018 } 13019 func rewriteValueARM64_OpNeqB(v *Value, config *Config) bool { 13020 b := v.Block 13021 _ = b 13022 // match: (NeqB x y) 13023 // cond: 13024 // result: (XOR x y) 13025 for { 13026 x := v.Args[0] 13027 y := v.Args[1] 13028 v.reset(OpARM64XOR) 13029 v.AddArg(x) 13030 v.AddArg(y) 13031 return true 13032 } 13033 } 13034 func rewriteValueARM64_OpNeqPtr(v *Value, config *Config) bool { 13035 b := v.Block 13036 _ = b 13037 // match: (NeqPtr x y) 13038 // cond: 13039 // result: (NotEqual (CMP x y)) 13040 for { 13041 x := v.Args[0] 13042 y := v.Args[1] 13043 v.reset(OpARM64NotEqual) 13044 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 13045 v0.AddArg(x) 13046 v0.AddArg(y) 13047 v.AddArg(v0) 13048 return true 13049 } 13050 } 13051 func rewriteValueARM64_OpNilCheck(v *Value, config *Config) bool { 13052 b := v.Block 13053 _ = b 13054 // match: (NilCheck ptr mem) 13055 // cond: 13056 // result: (LoweredNilCheck ptr mem) 13057 for { 13058 ptr := v.Args[0] 13059 mem := v.Args[1] 13060 v.reset(OpARM64LoweredNilCheck) 13061 v.AddArg(ptr) 13062 v.AddArg(mem) 13063 return true 13064 } 13065 } 13066 func rewriteValueARM64_OpNot(v *Value, config *Config) bool { 13067 b := v.Block 13068 _ = b 13069 // match: (Not x) 13070 // cond: 13071 // result: (XOR (MOVDconst [1]) x) 13072 for { 13073 x := v.Args[0] 13074 v.reset(OpARM64XOR) 13075 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 13076 v0.AuxInt = 1 13077 v.AddArg(v0) 13078 v.AddArg(x) 13079 return true 13080 } 13081 } 13082 func rewriteValueARM64_OpOffPtr(v *Value, config *Config) bool { 13083 b := v.Block 13084 _ = b 13085 // match: (OffPtr [off] ptr:(SP)) 13086 // cond: 13087 // result: (MOVDaddr [off] ptr) 13088 for { 13089 off := v.AuxInt 13090 ptr := v.Args[0] 13091 if ptr.Op != OpSP { 13092 break 13093 } 13094 v.reset(OpARM64MOVDaddr) 13095 v.AuxInt = off 13096 v.AddArg(ptr) 13097 return true 13098 } 13099 // match: (OffPtr [off] ptr) 13100 // cond: 13101 // result: (ADDconst [off] ptr) 13102 for { 13103 off := v.AuxInt 13104 ptr := v.Args[0] 13105 v.reset(OpARM64ADDconst) 13106 v.AuxInt = off 13107 v.AddArg(ptr) 13108 return true 13109 } 13110 } 13111 func rewriteValueARM64_OpOr16(v *Value, config *Config) bool { 13112 b := v.Block 13113 _ = b 13114 // match: (Or16 x y) 13115 // cond: 13116 // result: (OR x y) 13117 for { 13118 x := v.Args[0] 13119 y := v.Args[1] 13120 v.reset(OpARM64OR) 13121 v.AddArg(x) 13122 v.AddArg(y) 13123 return true 13124 } 13125 } 13126 func rewriteValueARM64_OpOr32(v *Value, config *Config) bool { 13127 b := v.Block 13128 _ = b 13129 // match: (Or32 x y) 13130 // cond: 13131 // result: (OR x y) 13132 for { 13133 x := v.Args[0] 13134 y := v.Args[1] 13135 v.reset(OpARM64OR) 13136 v.AddArg(x) 13137 v.AddArg(y) 13138 return true 13139 } 13140 } 13141 func rewriteValueARM64_OpOr64(v *Value, config *Config) bool { 13142 b := v.Block 13143 _ = b 13144 // match: (Or64 x y) 13145 // cond: 13146 // result: (OR x y) 13147 for { 13148 x := v.Args[0] 13149 y := v.Args[1] 13150 v.reset(OpARM64OR) 13151 v.AddArg(x) 13152 v.AddArg(y) 13153 return true 13154 } 13155 } 13156 func rewriteValueARM64_OpOr8(v *Value, config *Config) bool { 13157 b := v.Block 13158 _ = b 13159 // match: (Or8 x y) 13160 // cond: 13161 // result: (OR x y) 13162 for { 13163 x := v.Args[0] 13164 y := v.Args[1] 13165 v.reset(OpARM64OR) 13166 v.AddArg(x) 13167 v.AddArg(y) 13168 return true 13169 } 13170 } 13171 func rewriteValueARM64_OpOrB(v *Value, config *Config) bool { 13172 b := v.Block 13173 _ = b 13174 // match: (OrB x y) 13175 // cond: 13176 // result: (OR x y) 13177 for { 13178 x := v.Args[0] 13179 y := v.Args[1] 13180 v.reset(OpARM64OR) 13181 v.AddArg(x) 13182 v.AddArg(y) 13183 return true 13184 } 13185 } 13186 func rewriteValueARM64_OpRsh16Ux16(v *Value, config *Config) bool { 13187 b := v.Block 13188 _ = b 13189 // match: (Rsh16Ux16 <t> x y) 13190 // cond: 13191 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13192 for { 13193 t := v.Type 13194 x := v.Args[0] 13195 y := v.Args[1] 13196 v.reset(OpARM64CSELULT) 13197 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13198 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13199 v1.AddArg(x) 13200 v0.AddArg(v1) 13201 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13202 v2.AddArg(y) 13203 v0.AddArg(v2) 13204 v.AddArg(v0) 13205 v3 := b.NewValue0(v.Pos, OpConst64, t) 13206 v3.AuxInt = 0 13207 v.AddArg(v3) 13208 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13209 v4.AuxInt = 64 13210 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13211 v5.AddArg(y) 13212 v4.AddArg(v5) 13213 v.AddArg(v4) 13214 return true 13215 } 13216 } 13217 func rewriteValueARM64_OpRsh16Ux32(v *Value, config *Config) bool { 13218 b := v.Block 13219 _ = b 13220 // match: (Rsh16Ux32 <t> x y) 13221 // cond: 13222 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13223 for { 13224 t := v.Type 13225 x := v.Args[0] 13226 y := v.Args[1] 13227 v.reset(OpARM64CSELULT) 13228 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13229 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13230 v1.AddArg(x) 13231 v0.AddArg(v1) 13232 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13233 v2.AddArg(y) 13234 v0.AddArg(v2) 13235 v.AddArg(v0) 13236 v3 := b.NewValue0(v.Pos, OpConst64, t) 13237 v3.AuxInt = 0 13238 v.AddArg(v3) 13239 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13240 v4.AuxInt = 64 13241 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13242 v5.AddArg(y) 13243 v4.AddArg(v5) 13244 v.AddArg(v4) 13245 return true 13246 } 13247 } 13248 func rewriteValueARM64_OpRsh16Ux64(v *Value, config *Config) bool { 13249 b := v.Block 13250 _ = b 13251 // match: (Rsh16Ux64 x (MOVDconst [c])) 13252 // cond: uint64(c) < 16 13253 // result: (SRLconst (ZeroExt16to64 x) [c]) 13254 for { 13255 x := v.Args[0] 13256 v_1 := v.Args[1] 13257 if v_1.Op != OpARM64MOVDconst { 13258 break 13259 } 13260 c := v_1.AuxInt 13261 if !(uint64(c) < 16) { 13262 break 13263 } 13264 v.reset(OpARM64SRLconst) 13265 v.AuxInt = c 13266 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13267 v0.AddArg(x) 13268 v.AddArg(v0) 13269 return true 13270 } 13271 // match: (Rsh16Ux64 _ (MOVDconst [c])) 13272 // cond: uint64(c) >= 16 13273 // result: (MOVDconst [0]) 13274 for { 13275 v_1 := v.Args[1] 13276 if v_1.Op != OpARM64MOVDconst { 13277 break 13278 } 13279 c := v_1.AuxInt 13280 if !(uint64(c) >= 16) { 13281 break 13282 } 13283 v.reset(OpARM64MOVDconst) 13284 v.AuxInt = 0 13285 return true 13286 } 13287 // match: (Rsh16Ux64 <t> x y) 13288 // cond: 13289 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 13290 for { 13291 t := v.Type 13292 x := v.Args[0] 13293 y := v.Args[1] 13294 v.reset(OpARM64CSELULT) 13295 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13296 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13297 v1.AddArg(x) 13298 v0.AddArg(v1) 13299 v0.AddArg(y) 13300 v.AddArg(v0) 13301 v2 := b.NewValue0(v.Pos, OpConst64, t) 13302 v2.AuxInt = 0 13303 v.AddArg(v2) 13304 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13305 v3.AuxInt = 64 13306 v3.AddArg(y) 13307 v.AddArg(v3) 13308 return true 13309 } 13310 } 13311 func rewriteValueARM64_OpRsh16Ux8(v *Value, config *Config) bool { 13312 b := v.Block 13313 _ = b 13314 // match: (Rsh16Ux8 <t> x y) 13315 // cond: 13316 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13317 for { 13318 t := v.Type 13319 x := v.Args[0] 13320 y := v.Args[1] 13321 v.reset(OpARM64CSELULT) 13322 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13323 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13324 v1.AddArg(x) 13325 v0.AddArg(v1) 13326 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13327 v2.AddArg(y) 13328 v0.AddArg(v2) 13329 v.AddArg(v0) 13330 v3 := b.NewValue0(v.Pos, OpConst64, t) 13331 v3.AuxInt = 0 13332 v.AddArg(v3) 13333 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13334 v4.AuxInt = 64 13335 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13336 v5.AddArg(y) 13337 v4.AddArg(v5) 13338 v.AddArg(v4) 13339 return true 13340 } 13341 } 13342 func rewriteValueARM64_OpRsh16x16(v *Value, config *Config) bool { 13343 b := v.Block 13344 _ = b 13345 // match: (Rsh16x16 x y) 13346 // cond: 13347 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 13348 for { 13349 x := v.Args[0] 13350 y := v.Args[1] 13351 v.reset(OpARM64SRA) 13352 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13353 v0.AddArg(x) 13354 v.AddArg(v0) 13355 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13356 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13357 v2.AddArg(y) 13358 v1.AddArg(v2) 13359 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13360 v3.AuxInt = 63 13361 v1.AddArg(v3) 13362 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13363 v4.AuxInt = 64 13364 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13365 v5.AddArg(y) 13366 v4.AddArg(v5) 13367 v1.AddArg(v4) 13368 v.AddArg(v1) 13369 return true 13370 } 13371 } 13372 func rewriteValueARM64_OpRsh16x32(v *Value, config *Config) bool { 13373 b := v.Block 13374 _ = b 13375 // match: (Rsh16x32 x y) 13376 // cond: 13377 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 13378 for { 13379 x := v.Args[0] 13380 y := v.Args[1] 13381 v.reset(OpARM64SRA) 13382 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13383 v0.AddArg(x) 13384 v.AddArg(v0) 13385 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13386 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13387 v2.AddArg(y) 13388 v1.AddArg(v2) 13389 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13390 v3.AuxInt = 63 13391 v1.AddArg(v3) 13392 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13393 v4.AuxInt = 64 13394 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13395 v5.AddArg(y) 13396 v4.AddArg(v5) 13397 v1.AddArg(v4) 13398 v.AddArg(v1) 13399 return true 13400 } 13401 } 13402 func rewriteValueARM64_OpRsh16x64(v *Value, config *Config) bool { 13403 b := v.Block 13404 _ = b 13405 // match: (Rsh16x64 x (MOVDconst [c])) 13406 // cond: uint64(c) < 16 13407 // result: (SRAconst (SignExt16to64 x) [c]) 13408 for { 13409 x := v.Args[0] 13410 v_1 := v.Args[1] 13411 if v_1.Op != OpARM64MOVDconst { 13412 break 13413 } 13414 c := v_1.AuxInt 13415 if !(uint64(c) < 16) { 13416 break 13417 } 13418 v.reset(OpARM64SRAconst) 13419 v.AuxInt = c 13420 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13421 v0.AddArg(x) 13422 v.AddArg(v0) 13423 return true 13424 } 13425 // match: (Rsh16x64 x (MOVDconst [c])) 13426 // cond: uint64(c) >= 16 13427 // result: (SRAconst (SignExt16to64 x) [63]) 13428 for { 13429 x := v.Args[0] 13430 v_1 := v.Args[1] 13431 if v_1.Op != OpARM64MOVDconst { 13432 break 13433 } 13434 c := v_1.AuxInt 13435 if !(uint64(c) >= 16) { 13436 break 13437 } 13438 v.reset(OpARM64SRAconst) 13439 v.AuxInt = 63 13440 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13441 v0.AddArg(x) 13442 v.AddArg(v0) 13443 return true 13444 } 13445 // match: (Rsh16x64 x y) 13446 // cond: 13447 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 13448 for { 13449 x := v.Args[0] 13450 y := v.Args[1] 13451 v.reset(OpARM64SRA) 13452 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13453 v0.AddArg(x) 13454 v.AddArg(v0) 13455 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13456 v1.AddArg(y) 13457 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 13458 v2.AuxInt = 63 13459 v1.AddArg(v2) 13460 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13461 v3.AuxInt = 64 13462 v3.AddArg(y) 13463 v1.AddArg(v3) 13464 v.AddArg(v1) 13465 return true 13466 } 13467 } 13468 func rewriteValueARM64_OpRsh16x8(v *Value, config *Config) bool { 13469 b := v.Block 13470 _ = b 13471 // match: (Rsh16x8 x y) 13472 // cond: 13473 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 13474 for { 13475 x := v.Args[0] 13476 y := v.Args[1] 13477 v.reset(OpARM64SRA) 13478 v0 := b.NewValue0(v.Pos, OpSignExt16to64, config.fe.TypeInt64()) 13479 v0.AddArg(x) 13480 v.AddArg(v0) 13481 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13482 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13483 v2.AddArg(y) 13484 v1.AddArg(v2) 13485 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13486 v3.AuxInt = 63 13487 v1.AddArg(v3) 13488 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13489 v4.AuxInt = 64 13490 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13491 v5.AddArg(y) 13492 v4.AddArg(v5) 13493 v1.AddArg(v4) 13494 v.AddArg(v1) 13495 return true 13496 } 13497 } 13498 func rewriteValueARM64_OpRsh32Ux16(v *Value, config *Config) bool { 13499 b := v.Block 13500 _ = b 13501 // match: (Rsh32Ux16 <t> x y) 13502 // cond: 13503 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13504 for { 13505 t := v.Type 13506 x := v.Args[0] 13507 y := v.Args[1] 13508 v.reset(OpARM64CSELULT) 13509 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13510 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13511 v1.AddArg(x) 13512 v0.AddArg(v1) 13513 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13514 v2.AddArg(y) 13515 v0.AddArg(v2) 13516 v.AddArg(v0) 13517 v3 := b.NewValue0(v.Pos, OpConst64, t) 13518 v3.AuxInt = 0 13519 v.AddArg(v3) 13520 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13521 v4.AuxInt = 64 13522 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13523 v5.AddArg(y) 13524 v4.AddArg(v5) 13525 v.AddArg(v4) 13526 return true 13527 } 13528 } 13529 func rewriteValueARM64_OpRsh32Ux32(v *Value, config *Config) bool { 13530 b := v.Block 13531 _ = b 13532 // match: (Rsh32Ux32 <t> x y) 13533 // cond: 13534 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13535 for { 13536 t := v.Type 13537 x := v.Args[0] 13538 y := v.Args[1] 13539 v.reset(OpARM64CSELULT) 13540 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13541 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13542 v1.AddArg(x) 13543 v0.AddArg(v1) 13544 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13545 v2.AddArg(y) 13546 v0.AddArg(v2) 13547 v.AddArg(v0) 13548 v3 := b.NewValue0(v.Pos, OpConst64, t) 13549 v3.AuxInt = 0 13550 v.AddArg(v3) 13551 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13552 v4.AuxInt = 64 13553 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13554 v5.AddArg(y) 13555 v4.AddArg(v5) 13556 v.AddArg(v4) 13557 return true 13558 } 13559 } 13560 func rewriteValueARM64_OpRsh32Ux64(v *Value, config *Config) bool { 13561 b := v.Block 13562 _ = b 13563 // match: (Rsh32Ux64 x (MOVDconst [c])) 13564 // cond: uint64(c) < 32 13565 // result: (SRLconst (ZeroExt32to64 x) [c]) 13566 for { 13567 x := v.Args[0] 13568 v_1 := v.Args[1] 13569 if v_1.Op != OpARM64MOVDconst { 13570 break 13571 } 13572 c := v_1.AuxInt 13573 if !(uint64(c) < 32) { 13574 break 13575 } 13576 v.reset(OpARM64SRLconst) 13577 v.AuxInt = c 13578 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13579 v0.AddArg(x) 13580 v.AddArg(v0) 13581 return true 13582 } 13583 // match: (Rsh32Ux64 _ (MOVDconst [c])) 13584 // cond: uint64(c) >= 32 13585 // result: (MOVDconst [0]) 13586 for { 13587 v_1 := v.Args[1] 13588 if v_1.Op != OpARM64MOVDconst { 13589 break 13590 } 13591 c := v_1.AuxInt 13592 if !(uint64(c) >= 32) { 13593 break 13594 } 13595 v.reset(OpARM64MOVDconst) 13596 v.AuxInt = 0 13597 return true 13598 } 13599 // match: (Rsh32Ux64 <t> x y) 13600 // cond: 13601 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 13602 for { 13603 t := v.Type 13604 x := v.Args[0] 13605 y := v.Args[1] 13606 v.reset(OpARM64CSELULT) 13607 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13608 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13609 v1.AddArg(x) 13610 v0.AddArg(v1) 13611 v0.AddArg(y) 13612 v.AddArg(v0) 13613 v2 := b.NewValue0(v.Pos, OpConst64, t) 13614 v2.AuxInt = 0 13615 v.AddArg(v2) 13616 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13617 v3.AuxInt = 64 13618 v3.AddArg(y) 13619 v.AddArg(v3) 13620 return true 13621 } 13622 } 13623 func rewriteValueARM64_OpRsh32Ux8(v *Value, config *Config) bool { 13624 b := v.Block 13625 _ = b 13626 // match: (Rsh32Ux8 <t> x y) 13627 // cond: 13628 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13629 for { 13630 t := v.Type 13631 x := v.Args[0] 13632 y := v.Args[1] 13633 v.reset(OpARM64CSELULT) 13634 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13635 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13636 v1.AddArg(x) 13637 v0.AddArg(v1) 13638 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13639 v2.AddArg(y) 13640 v0.AddArg(v2) 13641 v.AddArg(v0) 13642 v3 := b.NewValue0(v.Pos, OpConst64, t) 13643 v3.AuxInt = 0 13644 v.AddArg(v3) 13645 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13646 v4.AuxInt = 64 13647 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13648 v5.AddArg(y) 13649 v4.AddArg(v5) 13650 v.AddArg(v4) 13651 return true 13652 } 13653 } 13654 func rewriteValueARM64_OpRsh32x16(v *Value, config *Config) bool { 13655 b := v.Block 13656 _ = b 13657 // match: (Rsh32x16 x y) 13658 // cond: 13659 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 13660 for { 13661 x := v.Args[0] 13662 y := v.Args[1] 13663 v.reset(OpARM64SRA) 13664 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13665 v0.AddArg(x) 13666 v.AddArg(v0) 13667 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13668 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13669 v2.AddArg(y) 13670 v1.AddArg(v2) 13671 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13672 v3.AuxInt = 63 13673 v1.AddArg(v3) 13674 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13675 v4.AuxInt = 64 13676 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13677 v5.AddArg(y) 13678 v4.AddArg(v5) 13679 v1.AddArg(v4) 13680 v.AddArg(v1) 13681 return true 13682 } 13683 } 13684 func rewriteValueARM64_OpRsh32x32(v *Value, config *Config) bool { 13685 b := v.Block 13686 _ = b 13687 // match: (Rsh32x32 x y) 13688 // cond: 13689 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 13690 for { 13691 x := v.Args[0] 13692 y := v.Args[1] 13693 v.reset(OpARM64SRA) 13694 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13695 v0.AddArg(x) 13696 v.AddArg(v0) 13697 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13698 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13699 v2.AddArg(y) 13700 v1.AddArg(v2) 13701 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13702 v3.AuxInt = 63 13703 v1.AddArg(v3) 13704 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13705 v4.AuxInt = 64 13706 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13707 v5.AddArg(y) 13708 v4.AddArg(v5) 13709 v1.AddArg(v4) 13710 v.AddArg(v1) 13711 return true 13712 } 13713 } 13714 func rewriteValueARM64_OpRsh32x64(v *Value, config *Config) bool { 13715 b := v.Block 13716 _ = b 13717 // match: (Rsh32x64 x (MOVDconst [c])) 13718 // cond: uint64(c) < 32 13719 // result: (SRAconst (SignExt32to64 x) [c]) 13720 for { 13721 x := v.Args[0] 13722 v_1 := v.Args[1] 13723 if v_1.Op != OpARM64MOVDconst { 13724 break 13725 } 13726 c := v_1.AuxInt 13727 if !(uint64(c) < 32) { 13728 break 13729 } 13730 v.reset(OpARM64SRAconst) 13731 v.AuxInt = c 13732 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13733 v0.AddArg(x) 13734 v.AddArg(v0) 13735 return true 13736 } 13737 // match: (Rsh32x64 x (MOVDconst [c])) 13738 // cond: uint64(c) >= 32 13739 // result: (SRAconst (SignExt32to64 x) [63]) 13740 for { 13741 x := v.Args[0] 13742 v_1 := v.Args[1] 13743 if v_1.Op != OpARM64MOVDconst { 13744 break 13745 } 13746 c := v_1.AuxInt 13747 if !(uint64(c) >= 32) { 13748 break 13749 } 13750 v.reset(OpARM64SRAconst) 13751 v.AuxInt = 63 13752 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13753 v0.AddArg(x) 13754 v.AddArg(v0) 13755 return true 13756 } 13757 // match: (Rsh32x64 x y) 13758 // cond: 13759 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 13760 for { 13761 x := v.Args[0] 13762 y := v.Args[1] 13763 v.reset(OpARM64SRA) 13764 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13765 v0.AddArg(x) 13766 v.AddArg(v0) 13767 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13768 v1.AddArg(y) 13769 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 13770 v2.AuxInt = 63 13771 v1.AddArg(v2) 13772 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13773 v3.AuxInt = 64 13774 v3.AddArg(y) 13775 v1.AddArg(v3) 13776 v.AddArg(v1) 13777 return true 13778 } 13779 } 13780 func rewriteValueARM64_OpRsh32x8(v *Value, config *Config) bool { 13781 b := v.Block 13782 _ = b 13783 // match: (Rsh32x8 x y) 13784 // cond: 13785 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 13786 for { 13787 x := v.Args[0] 13788 y := v.Args[1] 13789 v.reset(OpARM64SRA) 13790 v0 := b.NewValue0(v.Pos, OpSignExt32to64, config.fe.TypeInt64()) 13791 v0.AddArg(x) 13792 v.AddArg(v0) 13793 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13794 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13795 v2.AddArg(y) 13796 v1.AddArg(v2) 13797 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13798 v3.AuxInt = 63 13799 v1.AddArg(v3) 13800 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13801 v4.AuxInt = 64 13802 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13803 v5.AddArg(y) 13804 v4.AddArg(v5) 13805 v1.AddArg(v4) 13806 v.AddArg(v1) 13807 return true 13808 } 13809 } 13810 func rewriteValueARM64_OpRsh64Ux16(v *Value, config *Config) bool { 13811 b := v.Block 13812 _ = b 13813 // match: (Rsh64Ux16 <t> x y) 13814 // cond: 13815 // result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13816 for { 13817 t := v.Type 13818 x := v.Args[0] 13819 y := v.Args[1] 13820 v.reset(OpARM64CSELULT) 13821 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13822 v0.AddArg(x) 13823 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13824 v1.AddArg(y) 13825 v0.AddArg(v1) 13826 v.AddArg(v0) 13827 v2 := b.NewValue0(v.Pos, OpConst64, t) 13828 v2.AuxInt = 0 13829 v.AddArg(v2) 13830 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13831 v3.AuxInt = 64 13832 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13833 v4.AddArg(y) 13834 v3.AddArg(v4) 13835 v.AddArg(v3) 13836 return true 13837 } 13838 } 13839 func rewriteValueARM64_OpRsh64Ux32(v *Value, config *Config) bool { 13840 b := v.Block 13841 _ = b 13842 // match: (Rsh64Ux32 <t> x y) 13843 // cond: 13844 // result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13845 for { 13846 t := v.Type 13847 x := v.Args[0] 13848 y := v.Args[1] 13849 v.reset(OpARM64CSELULT) 13850 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13851 v0.AddArg(x) 13852 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13853 v1.AddArg(y) 13854 v0.AddArg(v1) 13855 v.AddArg(v0) 13856 v2 := b.NewValue0(v.Pos, OpConst64, t) 13857 v2.AuxInt = 0 13858 v.AddArg(v2) 13859 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13860 v3.AuxInt = 64 13861 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13862 v4.AddArg(y) 13863 v3.AddArg(v4) 13864 v.AddArg(v3) 13865 return true 13866 } 13867 } 13868 func rewriteValueARM64_OpRsh64Ux64(v *Value, config *Config) bool { 13869 b := v.Block 13870 _ = b 13871 // match: (Rsh64Ux64 x (MOVDconst [c])) 13872 // cond: uint64(c) < 64 13873 // result: (SRLconst x [c]) 13874 for { 13875 x := v.Args[0] 13876 v_1 := v.Args[1] 13877 if v_1.Op != OpARM64MOVDconst { 13878 break 13879 } 13880 c := v_1.AuxInt 13881 if !(uint64(c) < 64) { 13882 break 13883 } 13884 v.reset(OpARM64SRLconst) 13885 v.AuxInt = c 13886 v.AddArg(x) 13887 return true 13888 } 13889 // match: (Rsh64Ux64 _ (MOVDconst [c])) 13890 // cond: uint64(c) >= 64 13891 // result: (MOVDconst [0]) 13892 for { 13893 v_1 := v.Args[1] 13894 if v_1.Op != OpARM64MOVDconst { 13895 break 13896 } 13897 c := v_1.AuxInt 13898 if !(uint64(c) >= 64) { 13899 break 13900 } 13901 v.reset(OpARM64MOVDconst) 13902 v.AuxInt = 0 13903 return true 13904 } 13905 // match: (Rsh64Ux64 <t> x y) 13906 // cond: 13907 // result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 13908 for { 13909 t := v.Type 13910 x := v.Args[0] 13911 y := v.Args[1] 13912 v.reset(OpARM64CSELULT) 13913 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13914 v0.AddArg(x) 13915 v0.AddArg(y) 13916 v.AddArg(v0) 13917 v1 := b.NewValue0(v.Pos, OpConst64, t) 13918 v1.AuxInt = 0 13919 v.AddArg(v1) 13920 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13921 v2.AuxInt = 64 13922 v2.AddArg(y) 13923 v.AddArg(v2) 13924 return true 13925 } 13926 } 13927 func rewriteValueARM64_OpRsh64Ux8(v *Value, config *Config) bool { 13928 b := v.Block 13929 _ = b 13930 // match: (Rsh64Ux8 <t> x y) 13931 // cond: 13932 // result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13933 for { 13934 t := v.Type 13935 x := v.Args[0] 13936 y := v.Args[1] 13937 v.reset(OpARM64CSELULT) 13938 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13939 v0.AddArg(x) 13940 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13941 v1.AddArg(y) 13942 v0.AddArg(v1) 13943 v.AddArg(v0) 13944 v2 := b.NewValue0(v.Pos, OpConst64, t) 13945 v2.AuxInt = 0 13946 v.AddArg(v2) 13947 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13948 v3.AuxInt = 64 13949 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 13950 v4.AddArg(y) 13951 v3.AddArg(v4) 13952 v.AddArg(v3) 13953 return true 13954 } 13955 } 13956 func rewriteValueARM64_OpRsh64x16(v *Value, config *Config) bool { 13957 b := v.Block 13958 _ = b 13959 // match: (Rsh64x16 x y) 13960 // cond: 13961 // result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 13962 for { 13963 x := v.Args[0] 13964 y := v.Args[1] 13965 v.reset(OpARM64SRA) 13966 v.AddArg(x) 13967 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13968 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13969 v1.AddArg(y) 13970 v0.AddArg(v1) 13971 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 13972 v2.AuxInt = 63 13973 v0.AddArg(v2) 13974 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13975 v3.AuxInt = 64 13976 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 13977 v4.AddArg(y) 13978 v3.AddArg(v4) 13979 v0.AddArg(v3) 13980 v.AddArg(v0) 13981 return true 13982 } 13983 } 13984 func rewriteValueARM64_OpRsh64x32(v *Value, config *Config) bool { 13985 b := v.Block 13986 _ = b 13987 // match: (Rsh64x32 x y) 13988 // cond: 13989 // result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 13990 for { 13991 x := v.Args[0] 13992 y := v.Args[1] 13993 v.reset(OpARM64SRA) 13994 v.AddArg(x) 13995 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13996 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 13997 v1.AddArg(y) 13998 v0.AddArg(v1) 13999 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14000 v2.AuxInt = 63 14001 v0.AddArg(v2) 14002 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14003 v3.AuxInt = 64 14004 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 14005 v4.AddArg(y) 14006 v3.AddArg(v4) 14007 v0.AddArg(v3) 14008 v.AddArg(v0) 14009 return true 14010 } 14011 } 14012 func rewriteValueARM64_OpRsh64x64(v *Value, config *Config) bool { 14013 b := v.Block 14014 _ = b 14015 // match: (Rsh64x64 x (MOVDconst [c])) 14016 // cond: uint64(c) < 64 14017 // result: (SRAconst x [c]) 14018 for { 14019 x := v.Args[0] 14020 v_1 := v.Args[1] 14021 if v_1.Op != OpARM64MOVDconst { 14022 break 14023 } 14024 c := v_1.AuxInt 14025 if !(uint64(c) < 64) { 14026 break 14027 } 14028 v.reset(OpARM64SRAconst) 14029 v.AuxInt = c 14030 v.AddArg(x) 14031 return true 14032 } 14033 // match: (Rsh64x64 x (MOVDconst [c])) 14034 // cond: uint64(c) >= 64 14035 // result: (SRAconst x [63]) 14036 for { 14037 x := v.Args[0] 14038 v_1 := v.Args[1] 14039 if v_1.Op != OpARM64MOVDconst { 14040 break 14041 } 14042 c := v_1.AuxInt 14043 if !(uint64(c) >= 64) { 14044 break 14045 } 14046 v.reset(OpARM64SRAconst) 14047 v.AuxInt = 63 14048 v.AddArg(x) 14049 return true 14050 } 14051 // match: (Rsh64x64 x y) 14052 // cond: 14053 // result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 14054 for { 14055 x := v.Args[0] 14056 y := v.Args[1] 14057 v.reset(OpARM64SRA) 14058 v.AddArg(x) 14059 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14060 v0.AddArg(y) 14061 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 14062 v1.AuxInt = 63 14063 v0.AddArg(v1) 14064 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14065 v2.AuxInt = 64 14066 v2.AddArg(y) 14067 v0.AddArg(v2) 14068 v.AddArg(v0) 14069 return true 14070 } 14071 } 14072 func rewriteValueARM64_OpRsh64x8(v *Value, config *Config) bool { 14073 b := v.Block 14074 _ = b 14075 // match: (Rsh64x8 x y) 14076 // cond: 14077 // result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14078 for { 14079 x := v.Args[0] 14080 y := v.Args[1] 14081 v.reset(OpARM64SRA) 14082 v.AddArg(x) 14083 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14084 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14085 v1.AddArg(y) 14086 v0.AddArg(v1) 14087 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14088 v2.AuxInt = 63 14089 v0.AddArg(v2) 14090 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14091 v3.AuxInt = 64 14092 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14093 v4.AddArg(y) 14094 v3.AddArg(v4) 14095 v0.AddArg(v3) 14096 v.AddArg(v0) 14097 return true 14098 } 14099 } 14100 func rewriteValueARM64_OpRsh8Ux16(v *Value, config *Config) bool { 14101 b := v.Block 14102 _ = b 14103 // match: (Rsh8Ux16 <t> x y) 14104 // cond: 14105 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14106 for { 14107 t := v.Type 14108 x := v.Args[0] 14109 y := v.Args[1] 14110 v.reset(OpARM64CSELULT) 14111 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14112 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14113 v1.AddArg(x) 14114 v0.AddArg(v1) 14115 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 14116 v2.AddArg(y) 14117 v0.AddArg(v2) 14118 v.AddArg(v0) 14119 v3 := b.NewValue0(v.Pos, OpConst64, t) 14120 v3.AuxInt = 0 14121 v.AddArg(v3) 14122 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14123 v4.AuxInt = 64 14124 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 14125 v5.AddArg(y) 14126 v4.AddArg(v5) 14127 v.AddArg(v4) 14128 return true 14129 } 14130 } 14131 func rewriteValueARM64_OpRsh8Ux32(v *Value, config *Config) bool { 14132 b := v.Block 14133 _ = b 14134 // match: (Rsh8Ux32 <t> x y) 14135 // cond: 14136 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14137 for { 14138 t := v.Type 14139 x := v.Args[0] 14140 y := v.Args[1] 14141 v.reset(OpARM64CSELULT) 14142 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14143 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14144 v1.AddArg(x) 14145 v0.AddArg(v1) 14146 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 14147 v2.AddArg(y) 14148 v0.AddArg(v2) 14149 v.AddArg(v0) 14150 v3 := b.NewValue0(v.Pos, OpConst64, t) 14151 v3.AuxInt = 0 14152 v.AddArg(v3) 14153 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14154 v4.AuxInt = 64 14155 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 14156 v5.AddArg(y) 14157 v4.AddArg(v5) 14158 v.AddArg(v4) 14159 return true 14160 } 14161 } 14162 func rewriteValueARM64_OpRsh8Ux64(v *Value, config *Config) bool { 14163 b := v.Block 14164 _ = b 14165 // match: (Rsh8Ux64 x (MOVDconst [c])) 14166 // cond: uint64(c) < 8 14167 // result: (SRLconst (ZeroExt8to64 x) [c]) 14168 for { 14169 x := v.Args[0] 14170 v_1 := v.Args[1] 14171 if v_1.Op != OpARM64MOVDconst { 14172 break 14173 } 14174 c := v_1.AuxInt 14175 if !(uint64(c) < 8) { 14176 break 14177 } 14178 v.reset(OpARM64SRLconst) 14179 v.AuxInt = c 14180 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14181 v0.AddArg(x) 14182 v.AddArg(v0) 14183 return true 14184 } 14185 // match: (Rsh8Ux64 _ (MOVDconst [c])) 14186 // cond: uint64(c) >= 8 14187 // result: (MOVDconst [0]) 14188 for { 14189 v_1 := v.Args[1] 14190 if v_1.Op != OpARM64MOVDconst { 14191 break 14192 } 14193 c := v_1.AuxInt 14194 if !(uint64(c) >= 8) { 14195 break 14196 } 14197 v.reset(OpARM64MOVDconst) 14198 v.AuxInt = 0 14199 return true 14200 } 14201 // match: (Rsh8Ux64 <t> x y) 14202 // cond: 14203 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 14204 for { 14205 t := v.Type 14206 x := v.Args[0] 14207 y := v.Args[1] 14208 v.reset(OpARM64CSELULT) 14209 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14210 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14211 v1.AddArg(x) 14212 v0.AddArg(v1) 14213 v0.AddArg(y) 14214 v.AddArg(v0) 14215 v2 := b.NewValue0(v.Pos, OpConst64, t) 14216 v2.AuxInt = 0 14217 v.AddArg(v2) 14218 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14219 v3.AuxInt = 64 14220 v3.AddArg(y) 14221 v.AddArg(v3) 14222 return true 14223 } 14224 } 14225 func rewriteValueARM64_OpRsh8Ux8(v *Value, config *Config) bool { 14226 b := v.Block 14227 _ = b 14228 // match: (Rsh8Ux8 <t> x y) 14229 // cond: 14230 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14231 for { 14232 t := v.Type 14233 x := v.Args[0] 14234 y := v.Args[1] 14235 v.reset(OpARM64CSELULT) 14236 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14237 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14238 v1.AddArg(x) 14239 v0.AddArg(v1) 14240 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14241 v2.AddArg(y) 14242 v0.AddArg(v2) 14243 v.AddArg(v0) 14244 v3 := b.NewValue0(v.Pos, OpConst64, t) 14245 v3.AuxInt = 0 14246 v.AddArg(v3) 14247 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14248 v4.AuxInt = 64 14249 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14250 v5.AddArg(y) 14251 v4.AddArg(v5) 14252 v.AddArg(v4) 14253 return true 14254 } 14255 } 14256 func rewriteValueARM64_OpRsh8x16(v *Value, config *Config) bool { 14257 b := v.Block 14258 _ = b 14259 // match: (Rsh8x16 x y) 14260 // cond: 14261 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14262 for { 14263 x := v.Args[0] 14264 y := v.Args[1] 14265 v.reset(OpARM64SRA) 14266 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14267 v0.AddArg(x) 14268 v.AddArg(v0) 14269 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14270 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 14271 v2.AddArg(y) 14272 v1.AddArg(v2) 14273 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14274 v3.AuxInt = 63 14275 v1.AddArg(v3) 14276 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14277 v4.AuxInt = 64 14278 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, config.fe.TypeUInt64()) 14279 v5.AddArg(y) 14280 v4.AddArg(v5) 14281 v1.AddArg(v4) 14282 v.AddArg(v1) 14283 return true 14284 } 14285 } 14286 func rewriteValueARM64_OpRsh8x32(v *Value, config *Config) bool { 14287 b := v.Block 14288 _ = b 14289 // match: (Rsh8x32 x y) 14290 // cond: 14291 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14292 for { 14293 x := v.Args[0] 14294 y := v.Args[1] 14295 v.reset(OpARM64SRA) 14296 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14297 v0.AddArg(x) 14298 v.AddArg(v0) 14299 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14300 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 14301 v2.AddArg(y) 14302 v1.AddArg(v2) 14303 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14304 v3.AuxInt = 63 14305 v1.AddArg(v3) 14306 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14307 v4.AuxInt = 64 14308 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, config.fe.TypeUInt64()) 14309 v5.AddArg(y) 14310 v4.AddArg(v5) 14311 v1.AddArg(v4) 14312 v.AddArg(v1) 14313 return true 14314 } 14315 } 14316 func rewriteValueARM64_OpRsh8x64(v *Value, config *Config) bool { 14317 b := v.Block 14318 _ = b 14319 // match: (Rsh8x64 x (MOVDconst [c])) 14320 // cond: uint64(c) < 8 14321 // result: (SRAconst (SignExt8to64 x) [c]) 14322 for { 14323 x := v.Args[0] 14324 v_1 := v.Args[1] 14325 if v_1.Op != OpARM64MOVDconst { 14326 break 14327 } 14328 c := v_1.AuxInt 14329 if !(uint64(c) < 8) { 14330 break 14331 } 14332 v.reset(OpARM64SRAconst) 14333 v.AuxInt = c 14334 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14335 v0.AddArg(x) 14336 v.AddArg(v0) 14337 return true 14338 } 14339 // match: (Rsh8x64 x (MOVDconst [c])) 14340 // cond: uint64(c) >= 8 14341 // result: (SRAconst (SignExt8to64 x) [63]) 14342 for { 14343 x := v.Args[0] 14344 v_1 := v.Args[1] 14345 if v_1.Op != OpARM64MOVDconst { 14346 break 14347 } 14348 c := v_1.AuxInt 14349 if !(uint64(c) >= 8) { 14350 break 14351 } 14352 v.reset(OpARM64SRAconst) 14353 v.AuxInt = 63 14354 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14355 v0.AddArg(x) 14356 v.AddArg(v0) 14357 return true 14358 } 14359 // match: (Rsh8x64 x y) 14360 // cond: 14361 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 14362 for { 14363 x := v.Args[0] 14364 y := v.Args[1] 14365 v.reset(OpARM64SRA) 14366 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14367 v0.AddArg(x) 14368 v.AddArg(v0) 14369 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14370 v1.AddArg(y) 14371 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14372 v2.AuxInt = 63 14373 v1.AddArg(v2) 14374 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14375 v3.AuxInt = 64 14376 v3.AddArg(y) 14377 v1.AddArg(v3) 14378 v.AddArg(v1) 14379 return true 14380 } 14381 } 14382 func rewriteValueARM64_OpRsh8x8(v *Value, config *Config) bool { 14383 b := v.Block 14384 _ = b 14385 // match: (Rsh8x8 x y) 14386 // cond: 14387 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14388 for { 14389 x := v.Args[0] 14390 y := v.Args[1] 14391 v.reset(OpARM64SRA) 14392 v0 := b.NewValue0(v.Pos, OpSignExt8to64, config.fe.TypeInt64()) 14393 v0.AddArg(x) 14394 v.AddArg(v0) 14395 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14396 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14397 v2.AddArg(y) 14398 v1.AddArg(v2) 14399 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14400 v3.AuxInt = 63 14401 v1.AddArg(v3) 14402 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14403 v4.AuxInt = 64 14404 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, config.fe.TypeUInt64()) 14405 v5.AddArg(y) 14406 v4.AddArg(v5) 14407 v1.AddArg(v4) 14408 v.AddArg(v1) 14409 return true 14410 } 14411 } 14412 func rewriteValueARM64_OpSignExt16to32(v *Value, config *Config) bool { 14413 b := v.Block 14414 _ = b 14415 // match: (SignExt16to32 x) 14416 // cond: 14417 // result: (MOVHreg x) 14418 for { 14419 x := v.Args[0] 14420 v.reset(OpARM64MOVHreg) 14421 v.AddArg(x) 14422 return true 14423 } 14424 } 14425 func rewriteValueARM64_OpSignExt16to64(v *Value, config *Config) bool { 14426 b := v.Block 14427 _ = b 14428 // match: (SignExt16to64 x) 14429 // cond: 14430 // result: (MOVHreg x) 14431 for { 14432 x := v.Args[0] 14433 v.reset(OpARM64MOVHreg) 14434 v.AddArg(x) 14435 return true 14436 } 14437 } 14438 func rewriteValueARM64_OpSignExt32to64(v *Value, config *Config) bool { 14439 b := v.Block 14440 _ = b 14441 // match: (SignExt32to64 x) 14442 // cond: 14443 // result: (MOVWreg x) 14444 for { 14445 x := v.Args[0] 14446 v.reset(OpARM64MOVWreg) 14447 v.AddArg(x) 14448 return true 14449 } 14450 } 14451 func rewriteValueARM64_OpSignExt8to16(v *Value, config *Config) bool { 14452 b := v.Block 14453 _ = b 14454 // match: (SignExt8to16 x) 14455 // cond: 14456 // result: (MOVBreg x) 14457 for { 14458 x := v.Args[0] 14459 v.reset(OpARM64MOVBreg) 14460 v.AddArg(x) 14461 return true 14462 } 14463 } 14464 func rewriteValueARM64_OpSignExt8to32(v *Value, config *Config) bool { 14465 b := v.Block 14466 _ = b 14467 // match: (SignExt8to32 x) 14468 // cond: 14469 // result: (MOVBreg x) 14470 for { 14471 x := v.Args[0] 14472 v.reset(OpARM64MOVBreg) 14473 v.AddArg(x) 14474 return true 14475 } 14476 } 14477 func rewriteValueARM64_OpSignExt8to64(v *Value, config *Config) bool { 14478 b := v.Block 14479 _ = b 14480 // match: (SignExt8to64 x) 14481 // cond: 14482 // result: (MOVBreg x) 14483 for { 14484 x := v.Args[0] 14485 v.reset(OpARM64MOVBreg) 14486 v.AddArg(x) 14487 return true 14488 } 14489 } 14490 func rewriteValueARM64_OpSlicemask(v *Value, config *Config) bool { 14491 b := v.Block 14492 _ = b 14493 // match: (Slicemask <t> x) 14494 // cond: 14495 // result: (SRAconst (NEG <t> x) [63]) 14496 for { 14497 t := v.Type 14498 x := v.Args[0] 14499 v.reset(OpARM64SRAconst) 14500 v.AuxInt = 63 14501 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 14502 v0.AddArg(x) 14503 v.AddArg(v0) 14504 return true 14505 } 14506 } 14507 func rewriteValueARM64_OpSqrt(v *Value, config *Config) bool { 14508 b := v.Block 14509 _ = b 14510 // match: (Sqrt x) 14511 // cond: 14512 // result: (FSQRTD x) 14513 for { 14514 x := v.Args[0] 14515 v.reset(OpARM64FSQRTD) 14516 v.AddArg(x) 14517 return true 14518 } 14519 } 14520 func rewriteValueARM64_OpStaticCall(v *Value, config *Config) bool { 14521 b := v.Block 14522 _ = b 14523 // match: (StaticCall [argwid] {target} mem) 14524 // cond: 14525 // result: (CALLstatic [argwid] {target} mem) 14526 for { 14527 argwid := v.AuxInt 14528 target := v.Aux 14529 mem := v.Args[0] 14530 v.reset(OpARM64CALLstatic) 14531 v.AuxInt = argwid 14532 v.Aux = target 14533 v.AddArg(mem) 14534 return true 14535 } 14536 } 14537 func rewriteValueARM64_OpStore(v *Value, config *Config) bool { 14538 b := v.Block 14539 _ = b 14540 // match: (Store [1] ptr val mem) 14541 // cond: 14542 // result: (MOVBstore ptr val mem) 14543 for { 14544 if v.AuxInt != 1 { 14545 break 14546 } 14547 ptr := v.Args[0] 14548 val := v.Args[1] 14549 mem := v.Args[2] 14550 v.reset(OpARM64MOVBstore) 14551 v.AddArg(ptr) 14552 v.AddArg(val) 14553 v.AddArg(mem) 14554 return true 14555 } 14556 // match: (Store [2] ptr val mem) 14557 // cond: 14558 // result: (MOVHstore ptr val mem) 14559 for { 14560 if v.AuxInt != 2 { 14561 break 14562 } 14563 ptr := v.Args[0] 14564 val := v.Args[1] 14565 mem := v.Args[2] 14566 v.reset(OpARM64MOVHstore) 14567 v.AddArg(ptr) 14568 v.AddArg(val) 14569 v.AddArg(mem) 14570 return true 14571 } 14572 // match: (Store [4] ptr val mem) 14573 // cond: !is32BitFloat(val.Type) 14574 // result: (MOVWstore ptr val mem) 14575 for { 14576 if v.AuxInt != 4 { 14577 break 14578 } 14579 ptr := v.Args[0] 14580 val := v.Args[1] 14581 mem := v.Args[2] 14582 if !(!is32BitFloat(val.Type)) { 14583 break 14584 } 14585 v.reset(OpARM64MOVWstore) 14586 v.AddArg(ptr) 14587 v.AddArg(val) 14588 v.AddArg(mem) 14589 return true 14590 } 14591 // match: (Store [8] ptr val mem) 14592 // cond: !is64BitFloat(val.Type) 14593 // result: (MOVDstore ptr val mem) 14594 for { 14595 if v.AuxInt != 8 { 14596 break 14597 } 14598 ptr := v.Args[0] 14599 val := v.Args[1] 14600 mem := v.Args[2] 14601 if !(!is64BitFloat(val.Type)) { 14602 break 14603 } 14604 v.reset(OpARM64MOVDstore) 14605 v.AddArg(ptr) 14606 v.AddArg(val) 14607 v.AddArg(mem) 14608 return true 14609 } 14610 // match: (Store [4] ptr val mem) 14611 // cond: is32BitFloat(val.Type) 14612 // result: (FMOVSstore ptr val mem) 14613 for { 14614 if v.AuxInt != 4 { 14615 break 14616 } 14617 ptr := v.Args[0] 14618 val := v.Args[1] 14619 mem := v.Args[2] 14620 if !(is32BitFloat(val.Type)) { 14621 break 14622 } 14623 v.reset(OpARM64FMOVSstore) 14624 v.AddArg(ptr) 14625 v.AddArg(val) 14626 v.AddArg(mem) 14627 return true 14628 } 14629 // match: (Store [8] ptr val mem) 14630 // cond: is64BitFloat(val.Type) 14631 // result: (FMOVDstore ptr val mem) 14632 for { 14633 if v.AuxInt != 8 { 14634 break 14635 } 14636 ptr := v.Args[0] 14637 val := v.Args[1] 14638 mem := v.Args[2] 14639 if !(is64BitFloat(val.Type)) { 14640 break 14641 } 14642 v.reset(OpARM64FMOVDstore) 14643 v.AddArg(ptr) 14644 v.AddArg(val) 14645 v.AddArg(mem) 14646 return true 14647 } 14648 return false 14649 } 14650 func rewriteValueARM64_OpSub16(v *Value, config *Config) bool { 14651 b := v.Block 14652 _ = b 14653 // match: (Sub16 x y) 14654 // cond: 14655 // result: (SUB x y) 14656 for { 14657 x := v.Args[0] 14658 y := v.Args[1] 14659 v.reset(OpARM64SUB) 14660 v.AddArg(x) 14661 v.AddArg(y) 14662 return true 14663 } 14664 } 14665 func rewriteValueARM64_OpSub32(v *Value, config *Config) bool { 14666 b := v.Block 14667 _ = b 14668 // match: (Sub32 x y) 14669 // cond: 14670 // result: (SUB x y) 14671 for { 14672 x := v.Args[0] 14673 y := v.Args[1] 14674 v.reset(OpARM64SUB) 14675 v.AddArg(x) 14676 v.AddArg(y) 14677 return true 14678 } 14679 } 14680 func rewriteValueARM64_OpSub32F(v *Value, config *Config) bool { 14681 b := v.Block 14682 _ = b 14683 // match: (Sub32F x y) 14684 // cond: 14685 // result: (FSUBS x y) 14686 for { 14687 x := v.Args[0] 14688 y := v.Args[1] 14689 v.reset(OpARM64FSUBS) 14690 v.AddArg(x) 14691 v.AddArg(y) 14692 return true 14693 } 14694 } 14695 func rewriteValueARM64_OpSub64(v *Value, config *Config) bool { 14696 b := v.Block 14697 _ = b 14698 // match: (Sub64 x y) 14699 // cond: 14700 // result: (SUB x y) 14701 for { 14702 x := v.Args[0] 14703 y := v.Args[1] 14704 v.reset(OpARM64SUB) 14705 v.AddArg(x) 14706 v.AddArg(y) 14707 return true 14708 } 14709 } 14710 func rewriteValueARM64_OpSub64F(v *Value, config *Config) bool { 14711 b := v.Block 14712 _ = b 14713 // match: (Sub64F x y) 14714 // cond: 14715 // result: (FSUBD x y) 14716 for { 14717 x := v.Args[0] 14718 y := v.Args[1] 14719 v.reset(OpARM64FSUBD) 14720 v.AddArg(x) 14721 v.AddArg(y) 14722 return true 14723 } 14724 } 14725 func rewriteValueARM64_OpSub8(v *Value, config *Config) bool { 14726 b := v.Block 14727 _ = b 14728 // match: (Sub8 x y) 14729 // cond: 14730 // result: (SUB x y) 14731 for { 14732 x := v.Args[0] 14733 y := v.Args[1] 14734 v.reset(OpARM64SUB) 14735 v.AddArg(x) 14736 v.AddArg(y) 14737 return true 14738 } 14739 } 14740 func rewriteValueARM64_OpSubPtr(v *Value, config *Config) bool { 14741 b := v.Block 14742 _ = b 14743 // match: (SubPtr x y) 14744 // cond: 14745 // result: (SUB x y) 14746 for { 14747 x := v.Args[0] 14748 y := v.Args[1] 14749 v.reset(OpARM64SUB) 14750 v.AddArg(x) 14751 v.AddArg(y) 14752 return true 14753 } 14754 } 14755 func rewriteValueARM64_OpTrunc16to8(v *Value, config *Config) bool { 14756 b := v.Block 14757 _ = b 14758 // match: (Trunc16to8 x) 14759 // cond: 14760 // result: x 14761 for { 14762 x := v.Args[0] 14763 v.reset(OpCopy) 14764 v.Type = x.Type 14765 v.AddArg(x) 14766 return true 14767 } 14768 } 14769 func rewriteValueARM64_OpTrunc32to16(v *Value, config *Config) bool { 14770 b := v.Block 14771 _ = b 14772 // match: (Trunc32to16 x) 14773 // cond: 14774 // result: x 14775 for { 14776 x := v.Args[0] 14777 v.reset(OpCopy) 14778 v.Type = x.Type 14779 v.AddArg(x) 14780 return true 14781 } 14782 } 14783 func rewriteValueARM64_OpTrunc32to8(v *Value, config *Config) bool { 14784 b := v.Block 14785 _ = b 14786 // match: (Trunc32to8 x) 14787 // cond: 14788 // result: x 14789 for { 14790 x := v.Args[0] 14791 v.reset(OpCopy) 14792 v.Type = x.Type 14793 v.AddArg(x) 14794 return true 14795 } 14796 } 14797 func rewriteValueARM64_OpTrunc64to16(v *Value, config *Config) bool { 14798 b := v.Block 14799 _ = b 14800 // match: (Trunc64to16 x) 14801 // cond: 14802 // result: x 14803 for { 14804 x := v.Args[0] 14805 v.reset(OpCopy) 14806 v.Type = x.Type 14807 v.AddArg(x) 14808 return true 14809 } 14810 } 14811 func rewriteValueARM64_OpTrunc64to32(v *Value, config *Config) bool { 14812 b := v.Block 14813 _ = b 14814 // match: (Trunc64to32 x) 14815 // cond: 14816 // result: x 14817 for { 14818 x := v.Args[0] 14819 v.reset(OpCopy) 14820 v.Type = x.Type 14821 v.AddArg(x) 14822 return true 14823 } 14824 } 14825 func rewriteValueARM64_OpTrunc64to8(v *Value, config *Config) bool { 14826 b := v.Block 14827 _ = b 14828 // match: (Trunc64to8 x) 14829 // cond: 14830 // result: x 14831 for { 14832 x := v.Args[0] 14833 v.reset(OpCopy) 14834 v.Type = x.Type 14835 v.AddArg(x) 14836 return true 14837 } 14838 } 14839 func rewriteValueARM64_OpXor16(v *Value, config *Config) bool { 14840 b := v.Block 14841 _ = b 14842 // match: (Xor16 x y) 14843 // cond: 14844 // result: (XOR x y) 14845 for { 14846 x := v.Args[0] 14847 y := v.Args[1] 14848 v.reset(OpARM64XOR) 14849 v.AddArg(x) 14850 v.AddArg(y) 14851 return true 14852 } 14853 } 14854 func rewriteValueARM64_OpXor32(v *Value, config *Config) bool { 14855 b := v.Block 14856 _ = b 14857 // match: (Xor32 x y) 14858 // cond: 14859 // result: (XOR x y) 14860 for { 14861 x := v.Args[0] 14862 y := v.Args[1] 14863 v.reset(OpARM64XOR) 14864 v.AddArg(x) 14865 v.AddArg(y) 14866 return true 14867 } 14868 } 14869 func rewriteValueARM64_OpXor64(v *Value, config *Config) bool { 14870 b := v.Block 14871 _ = b 14872 // match: (Xor64 x y) 14873 // cond: 14874 // result: (XOR x y) 14875 for { 14876 x := v.Args[0] 14877 y := v.Args[1] 14878 v.reset(OpARM64XOR) 14879 v.AddArg(x) 14880 v.AddArg(y) 14881 return true 14882 } 14883 } 14884 func rewriteValueARM64_OpXor8(v *Value, config *Config) bool { 14885 b := v.Block 14886 _ = b 14887 // match: (Xor8 x y) 14888 // cond: 14889 // result: (XOR x y) 14890 for { 14891 x := v.Args[0] 14892 y := v.Args[1] 14893 v.reset(OpARM64XOR) 14894 v.AddArg(x) 14895 v.AddArg(y) 14896 return true 14897 } 14898 } 14899 func rewriteValueARM64_OpZero(v *Value, config *Config) bool { 14900 b := v.Block 14901 _ = b 14902 // match: (Zero [s] _ mem) 14903 // cond: SizeAndAlign(s).Size() == 0 14904 // result: mem 14905 for { 14906 s := v.AuxInt 14907 mem := v.Args[1] 14908 if !(SizeAndAlign(s).Size() == 0) { 14909 break 14910 } 14911 v.reset(OpCopy) 14912 v.Type = mem.Type 14913 v.AddArg(mem) 14914 return true 14915 } 14916 // match: (Zero [s] ptr mem) 14917 // cond: SizeAndAlign(s).Size() == 1 14918 // result: (MOVBstore ptr (MOVDconst [0]) mem) 14919 for { 14920 s := v.AuxInt 14921 ptr := v.Args[0] 14922 mem := v.Args[1] 14923 if !(SizeAndAlign(s).Size() == 1) { 14924 break 14925 } 14926 v.reset(OpARM64MOVBstore) 14927 v.AddArg(ptr) 14928 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 14929 v0.AuxInt = 0 14930 v.AddArg(v0) 14931 v.AddArg(mem) 14932 return true 14933 } 14934 // match: (Zero [s] ptr mem) 14935 // cond: SizeAndAlign(s).Size() == 2 14936 // result: (MOVHstore ptr (MOVDconst [0]) mem) 14937 for { 14938 s := v.AuxInt 14939 ptr := v.Args[0] 14940 mem := v.Args[1] 14941 if !(SizeAndAlign(s).Size() == 2) { 14942 break 14943 } 14944 v.reset(OpARM64MOVHstore) 14945 v.AddArg(ptr) 14946 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 14947 v0.AuxInt = 0 14948 v.AddArg(v0) 14949 v.AddArg(mem) 14950 return true 14951 } 14952 // match: (Zero [s] ptr mem) 14953 // cond: SizeAndAlign(s).Size() == 4 14954 // result: (MOVWstore ptr (MOVDconst [0]) mem) 14955 for { 14956 s := v.AuxInt 14957 ptr := v.Args[0] 14958 mem := v.Args[1] 14959 if !(SizeAndAlign(s).Size() == 4) { 14960 break 14961 } 14962 v.reset(OpARM64MOVWstore) 14963 v.AddArg(ptr) 14964 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 14965 v0.AuxInt = 0 14966 v.AddArg(v0) 14967 v.AddArg(mem) 14968 return true 14969 } 14970 // match: (Zero [s] ptr mem) 14971 // cond: SizeAndAlign(s).Size() == 8 14972 // result: (MOVDstore ptr (MOVDconst [0]) mem) 14973 for { 14974 s := v.AuxInt 14975 ptr := v.Args[0] 14976 mem := v.Args[1] 14977 if !(SizeAndAlign(s).Size() == 8) { 14978 break 14979 } 14980 v.reset(OpARM64MOVDstore) 14981 v.AddArg(ptr) 14982 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 14983 v0.AuxInt = 0 14984 v.AddArg(v0) 14985 v.AddArg(mem) 14986 return true 14987 } 14988 // match: (Zero [s] ptr mem) 14989 // cond: SizeAndAlign(s).Size() == 3 14990 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 14991 for { 14992 s := v.AuxInt 14993 ptr := v.Args[0] 14994 mem := v.Args[1] 14995 if !(SizeAndAlign(s).Size() == 3) { 14996 break 14997 } 14998 v.reset(OpARM64MOVBstore) 14999 v.AuxInt = 2 15000 v.AddArg(ptr) 15001 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15002 v0.AuxInt = 0 15003 v.AddArg(v0) 15004 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 15005 v1.AddArg(ptr) 15006 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15007 v2.AuxInt = 0 15008 v1.AddArg(v2) 15009 v1.AddArg(mem) 15010 v.AddArg(v1) 15011 return true 15012 } 15013 // match: (Zero [s] ptr mem) 15014 // cond: SizeAndAlign(s).Size() == 5 15015 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15016 for { 15017 s := v.AuxInt 15018 ptr := v.Args[0] 15019 mem := v.Args[1] 15020 if !(SizeAndAlign(s).Size() == 5) { 15021 break 15022 } 15023 v.reset(OpARM64MOVBstore) 15024 v.AuxInt = 4 15025 v.AddArg(ptr) 15026 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15027 v0.AuxInt = 0 15028 v.AddArg(v0) 15029 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 15030 v1.AddArg(ptr) 15031 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15032 v2.AuxInt = 0 15033 v1.AddArg(v2) 15034 v1.AddArg(mem) 15035 v.AddArg(v1) 15036 return true 15037 } 15038 // match: (Zero [s] ptr mem) 15039 // cond: SizeAndAlign(s).Size() == 6 15040 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15041 for { 15042 s := v.AuxInt 15043 ptr := v.Args[0] 15044 mem := v.Args[1] 15045 if !(SizeAndAlign(s).Size() == 6) { 15046 break 15047 } 15048 v.reset(OpARM64MOVHstore) 15049 v.AuxInt = 4 15050 v.AddArg(ptr) 15051 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15052 v0.AuxInt = 0 15053 v.AddArg(v0) 15054 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 15055 v1.AddArg(ptr) 15056 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15057 v2.AuxInt = 0 15058 v1.AddArg(v2) 15059 v1.AddArg(mem) 15060 v.AddArg(v1) 15061 return true 15062 } 15063 // match: (Zero [s] ptr mem) 15064 // cond: SizeAndAlign(s).Size() == 7 15065 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 15066 for { 15067 s := v.AuxInt 15068 ptr := v.Args[0] 15069 mem := v.Args[1] 15070 if !(SizeAndAlign(s).Size() == 7) { 15071 break 15072 } 15073 v.reset(OpARM64MOVBstore) 15074 v.AuxInt = 6 15075 v.AddArg(ptr) 15076 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15077 v0.AuxInt = 0 15078 v.AddArg(v0) 15079 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 15080 v1.AuxInt = 4 15081 v1.AddArg(ptr) 15082 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15083 v2.AuxInt = 0 15084 v1.AddArg(v2) 15085 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 15086 v3.AddArg(ptr) 15087 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15088 v4.AuxInt = 0 15089 v3.AddArg(v4) 15090 v3.AddArg(mem) 15091 v1.AddArg(v3) 15092 v.AddArg(v1) 15093 return true 15094 } 15095 // match: (Zero [s] ptr mem) 15096 // cond: SizeAndAlign(s).Size() == 12 15097 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15098 for { 15099 s := v.AuxInt 15100 ptr := v.Args[0] 15101 mem := v.Args[1] 15102 if !(SizeAndAlign(s).Size() == 12) { 15103 break 15104 } 15105 v.reset(OpARM64MOVWstore) 15106 v.AuxInt = 8 15107 v.AddArg(ptr) 15108 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15109 v0.AuxInt = 0 15110 v.AddArg(v0) 15111 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 15112 v1.AddArg(ptr) 15113 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15114 v2.AuxInt = 0 15115 v1.AddArg(v2) 15116 v1.AddArg(mem) 15117 v.AddArg(v1) 15118 return true 15119 } 15120 // match: (Zero [s] ptr mem) 15121 // cond: SizeAndAlign(s).Size() == 16 15122 // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15123 for { 15124 s := v.AuxInt 15125 ptr := v.Args[0] 15126 mem := v.Args[1] 15127 if !(SizeAndAlign(s).Size() == 16) { 15128 break 15129 } 15130 v.reset(OpARM64MOVDstore) 15131 v.AuxInt = 8 15132 v.AddArg(ptr) 15133 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15134 v0.AuxInt = 0 15135 v.AddArg(v0) 15136 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 15137 v1.AddArg(ptr) 15138 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15139 v2.AuxInt = 0 15140 v1.AddArg(v2) 15141 v1.AddArg(mem) 15142 v.AddArg(v1) 15143 return true 15144 } 15145 // match: (Zero [s] ptr mem) 15146 // cond: SizeAndAlign(s).Size() == 24 15147 // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 15148 for { 15149 s := v.AuxInt 15150 ptr := v.Args[0] 15151 mem := v.Args[1] 15152 if !(SizeAndAlign(s).Size() == 24) { 15153 break 15154 } 15155 v.reset(OpARM64MOVDstore) 15156 v.AuxInt = 16 15157 v.AddArg(ptr) 15158 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15159 v0.AuxInt = 0 15160 v.AddArg(v0) 15161 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 15162 v1.AuxInt = 8 15163 v1.AddArg(ptr) 15164 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15165 v2.AuxInt = 0 15166 v1.AddArg(v2) 15167 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 15168 v3.AddArg(ptr) 15169 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) 15170 v4.AuxInt = 0 15171 v3.AddArg(v4) 15172 v3.AddArg(mem) 15173 v1.AddArg(v3) 15174 v.AddArg(v1) 15175 return true 15176 } 15177 // match: (Zero [s] ptr mem) 15178 // cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 15179 // result: (Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] (OffPtr <ptr.Type> ptr [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] ptr mem)) 15180 for { 15181 s := v.AuxInt 15182 ptr := v.Args[0] 15183 mem := v.Args[1] 15184 if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) { 15185 break 15186 } 15187 v.reset(OpZero) 15188 v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64() 15189 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 15190 v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 15191 v0.AddArg(ptr) 15192 v.AddArg(v0) 15193 v1 := b.NewValue0(v.Pos, OpZero, TypeMem) 15194 v1.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64() 15195 v1.AddArg(ptr) 15196 v1.AddArg(mem) 15197 v.AddArg(v1) 15198 return true 15199 } 15200 // match: (Zero [s] ptr mem) 15201 // cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice 15202 // result: (DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem) 15203 for { 15204 s := v.AuxInt 15205 ptr := v.Args[0] 15206 mem := v.Args[1] 15207 if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice) { 15208 break 15209 } 15210 v.reset(OpARM64DUFFZERO) 15211 v.AuxInt = 4 * (128 - int64(SizeAndAlign(s).Size()/8)) 15212 v.AddArg(ptr) 15213 v.AddArg(mem) 15214 return true 15215 } 15216 // match: (Zero [s] ptr mem) 15217 // cond: SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) 15218 // result: (LoweredZero ptr (ADDconst <ptr.Type> [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)] ptr) mem) 15219 for { 15220 s := v.AuxInt 15221 ptr := v.Args[0] 15222 mem := v.Args[1] 15223 if !(SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice)) { 15224 break 15225 } 15226 v.reset(OpARM64LoweredZero) 15227 v.AddArg(ptr) 15228 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type) 15229 v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) 15230 v0.AddArg(ptr) 15231 v.AddArg(v0) 15232 v.AddArg(mem) 15233 return true 15234 } 15235 return false 15236 } 15237 func rewriteValueARM64_OpZeroExt16to32(v *Value, config *Config) bool { 15238 b := v.Block 15239 _ = b 15240 // match: (ZeroExt16to32 x) 15241 // cond: 15242 // result: (MOVHUreg x) 15243 for { 15244 x := v.Args[0] 15245 v.reset(OpARM64MOVHUreg) 15246 v.AddArg(x) 15247 return true 15248 } 15249 } 15250 func rewriteValueARM64_OpZeroExt16to64(v *Value, config *Config) bool { 15251 b := v.Block 15252 _ = b 15253 // match: (ZeroExt16to64 x) 15254 // cond: 15255 // result: (MOVHUreg x) 15256 for { 15257 x := v.Args[0] 15258 v.reset(OpARM64MOVHUreg) 15259 v.AddArg(x) 15260 return true 15261 } 15262 } 15263 func rewriteValueARM64_OpZeroExt32to64(v *Value, config *Config) bool { 15264 b := v.Block 15265 _ = b 15266 // match: (ZeroExt32to64 x) 15267 // cond: 15268 // result: (MOVWUreg x) 15269 for { 15270 x := v.Args[0] 15271 v.reset(OpARM64MOVWUreg) 15272 v.AddArg(x) 15273 return true 15274 } 15275 } 15276 func rewriteValueARM64_OpZeroExt8to16(v *Value, config *Config) bool { 15277 b := v.Block 15278 _ = b 15279 // match: (ZeroExt8to16 x) 15280 // cond: 15281 // result: (MOVBUreg x) 15282 for { 15283 x := v.Args[0] 15284 v.reset(OpARM64MOVBUreg) 15285 v.AddArg(x) 15286 return true 15287 } 15288 } 15289 func rewriteValueARM64_OpZeroExt8to32(v *Value, config *Config) bool { 15290 b := v.Block 15291 _ = b 15292 // match: (ZeroExt8to32 x) 15293 // cond: 15294 // result: (MOVBUreg x) 15295 for { 15296 x := v.Args[0] 15297 v.reset(OpARM64MOVBUreg) 15298 v.AddArg(x) 15299 return true 15300 } 15301 } 15302 func rewriteValueARM64_OpZeroExt8to64(v *Value, config *Config) bool { 15303 b := v.Block 15304 _ = b 15305 // match: (ZeroExt8to64 x) 15306 // cond: 15307 // result: (MOVBUreg x) 15308 for { 15309 x := v.Args[0] 15310 v.reset(OpARM64MOVBUreg) 15311 v.AddArg(x) 15312 return true 15313 } 15314 } 15315 func rewriteBlockARM64(b *Block, config *Config) bool { 15316 switch b.Kind { 15317 case BlockARM64EQ: 15318 // match: (EQ (CMPconst [0] x) yes no) 15319 // cond: 15320 // result: (Z x yes no) 15321 for { 15322 v := b.Control 15323 if v.Op != OpARM64CMPconst { 15324 break 15325 } 15326 if v.AuxInt != 0 { 15327 break 15328 } 15329 x := v.Args[0] 15330 yes := b.Succs[0] 15331 no := b.Succs[1] 15332 b.Kind = BlockARM64Z 15333 b.SetControl(x) 15334 _ = yes 15335 _ = no 15336 return true 15337 } 15338 // match: (EQ (CMPWconst [0] x) yes no) 15339 // cond: 15340 // result: (ZW x yes no) 15341 for { 15342 v := b.Control 15343 if v.Op != OpARM64CMPWconst { 15344 break 15345 } 15346 if v.AuxInt != 0 { 15347 break 15348 } 15349 x := v.Args[0] 15350 yes := b.Succs[0] 15351 no := b.Succs[1] 15352 b.Kind = BlockARM64ZW 15353 b.SetControl(x) 15354 _ = yes 15355 _ = no 15356 return true 15357 } 15358 // match: (EQ (FlagEQ) yes no) 15359 // cond: 15360 // result: (First nil yes no) 15361 for { 15362 v := b.Control 15363 if v.Op != OpARM64FlagEQ { 15364 break 15365 } 15366 yes := b.Succs[0] 15367 no := b.Succs[1] 15368 b.Kind = BlockFirst 15369 b.SetControl(nil) 15370 _ = yes 15371 _ = no 15372 return true 15373 } 15374 // match: (EQ (FlagLT_ULT) yes no) 15375 // cond: 15376 // result: (First nil no yes) 15377 for { 15378 v := b.Control 15379 if v.Op != OpARM64FlagLT_ULT { 15380 break 15381 } 15382 yes := b.Succs[0] 15383 no := b.Succs[1] 15384 b.Kind = BlockFirst 15385 b.SetControl(nil) 15386 b.swapSuccessors() 15387 _ = no 15388 _ = yes 15389 return true 15390 } 15391 // match: (EQ (FlagLT_UGT) yes no) 15392 // cond: 15393 // result: (First nil no yes) 15394 for { 15395 v := b.Control 15396 if v.Op != OpARM64FlagLT_UGT { 15397 break 15398 } 15399 yes := b.Succs[0] 15400 no := b.Succs[1] 15401 b.Kind = BlockFirst 15402 b.SetControl(nil) 15403 b.swapSuccessors() 15404 _ = no 15405 _ = yes 15406 return true 15407 } 15408 // match: (EQ (FlagGT_ULT) yes no) 15409 // cond: 15410 // result: (First nil no yes) 15411 for { 15412 v := b.Control 15413 if v.Op != OpARM64FlagGT_ULT { 15414 break 15415 } 15416 yes := b.Succs[0] 15417 no := b.Succs[1] 15418 b.Kind = BlockFirst 15419 b.SetControl(nil) 15420 b.swapSuccessors() 15421 _ = no 15422 _ = yes 15423 return true 15424 } 15425 // match: (EQ (FlagGT_UGT) yes no) 15426 // cond: 15427 // result: (First nil no yes) 15428 for { 15429 v := b.Control 15430 if v.Op != OpARM64FlagGT_UGT { 15431 break 15432 } 15433 yes := b.Succs[0] 15434 no := b.Succs[1] 15435 b.Kind = BlockFirst 15436 b.SetControl(nil) 15437 b.swapSuccessors() 15438 _ = no 15439 _ = yes 15440 return true 15441 } 15442 // match: (EQ (InvertFlags cmp) yes no) 15443 // cond: 15444 // result: (EQ cmp yes no) 15445 for { 15446 v := b.Control 15447 if v.Op != OpARM64InvertFlags { 15448 break 15449 } 15450 cmp := v.Args[0] 15451 yes := b.Succs[0] 15452 no := b.Succs[1] 15453 b.Kind = BlockARM64EQ 15454 b.SetControl(cmp) 15455 _ = yes 15456 _ = no 15457 return true 15458 } 15459 case BlockARM64GE: 15460 // match: (GE (FlagEQ) yes no) 15461 // cond: 15462 // result: (First nil yes no) 15463 for { 15464 v := b.Control 15465 if v.Op != OpARM64FlagEQ { 15466 break 15467 } 15468 yes := b.Succs[0] 15469 no := b.Succs[1] 15470 b.Kind = BlockFirst 15471 b.SetControl(nil) 15472 _ = yes 15473 _ = no 15474 return true 15475 } 15476 // match: (GE (FlagLT_ULT) yes no) 15477 // cond: 15478 // result: (First nil no yes) 15479 for { 15480 v := b.Control 15481 if v.Op != OpARM64FlagLT_ULT { 15482 break 15483 } 15484 yes := b.Succs[0] 15485 no := b.Succs[1] 15486 b.Kind = BlockFirst 15487 b.SetControl(nil) 15488 b.swapSuccessors() 15489 _ = no 15490 _ = yes 15491 return true 15492 } 15493 // match: (GE (FlagLT_UGT) yes no) 15494 // cond: 15495 // result: (First nil no yes) 15496 for { 15497 v := b.Control 15498 if v.Op != OpARM64FlagLT_UGT { 15499 break 15500 } 15501 yes := b.Succs[0] 15502 no := b.Succs[1] 15503 b.Kind = BlockFirst 15504 b.SetControl(nil) 15505 b.swapSuccessors() 15506 _ = no 15507 _ = yes 15508 return true 15509 } 15510 // match: (GE (FlagGT_ULT) yes no) 15511 // cond: 15512 // result: (First nil yes no) 15513 for { 15514 v := b.Control 15515 if v.Op != OpARM64FlagGT_ULT { 15516 break 15517 } 15518 yes := b.Succs[0] 15519 no := b.Succs[1] 15520 b.Kind = BlockFirst 15521 b.SetControl(nil) 15522 _ = yes 15523 _ = no 15524 return true 15525 } 15526 // match: (GE (FlagGT_UGT) yes no) 15527 // cond: 15528 // result: (First nil yes no) 15529 for { 15530 v := b.Control 15531 if v.Op != OpARM64FlagGT_UGT { 15532 break 15533 } 15534 yes := b.Succs[0] 15535 no := b.Succs[1] 15536 b.Kind = BlockFirst 15537 b.SetControl(nil) 15538 _ = yes 15539 _ = no 15540 return true 15541 } 15542 // match: (GE (InvertFlags cmp) yes no) 15543 // cond: 15544 // result: (LE cmp yes no) 15545 for { 15546 v := b.Control 15547 if v.Op != OpARM64InvertFlags { 15548 break 15549 } 15550 cmp := v.Args[0] 15551 yes := b.Succs[0] 15552 no := b.Succs[1] 15553 b.Kind = BlockARM64LE 15554 b.SetControl(cmp) 15555 _ = yes 15556 _ = no 15557 return true 15558 } 15559 case BlockARM64GT: 15560 // match: (GT (FlagEQ) yes no) 15561 // cond: 15562 // result: (First nil no yes) 15563 for { 15564 v := b.Control 15565 if v.Op != OpARM64FlagEQ { 15566 break 15567 } 15568 yes := b.Succs[0] 15569 no := b.Succs[1] 15570 b.Kind = BlockFirst 15571 b.SetControl(nil) 15572 b.swapSuccessors() 15573 _ = no 15574 _ = yes 15575 return true 15576 } 15577 // match: (GT (FlagLT_ULT) yes no) 15578 // cond: 15579 // result: (First nil no yes) 15580 for { 15581 v := b.Control 15582 if v.Op != OpARM64FlagLT_ULT { 15583 break 15584 } 15585 yes := b.Succs[0] 15586 no := b.Succs[1] 15587 b.Kind = BlockFirst 15588 b.SetControl(nil) 15589 b.swapSuccessors() 15590 _ = no 15591 _ = yes 15592 return true 15593 } 15594 // match: (GT (FlagLT_UGT) yes no) 15595 // cond: 15596 // result: (First nil no yes) 15597 for { 15598 v := b.Control 15599 if v.Op != OpARM64FlagLT_UGT { 15600 break 15601 } 15602 yes := b.Succs[0] 15603 no := b.Succs[1] 15604 b.Kind = BlockFirst 15605 b.SetControl(nil) 15606 b.swapSuccessors() 15607 _ = no 15608 _ = yes 15609 return true 15610 } 15611 // match: (GT (FlagGT_ULT) yes no) 15612 // cond: 15613 // result: (First nil yes no) 15614 for { 15615 v := b.Control 15616 if v.Op != OpARM64FlagGT_ULT { 15617 break 15618 } 15619 yes := b.Succs[0] 15620 no := b.Succs[1] 15621 b.Kind = BlockFirst 15622 b.SetControl(nil) 15623 _ = yes 15624 _ = no 15625 return true 15626 } 15627 // match: (GT (FlagGT_UGT) yes no) 15628 // cond: 15629 // result: (First nil yes no) 15630 for { 15631 v := b.Control 15632 if v.Op != OpARM64FlagGT_UGT { 15633 break 15634 } 15635 yes := b.Succs[0] 15636 no := b.Succs[1] 15637 b.Kind = BlockFirst 15638 b.SetControl(nil) 15639 _ = yes 15640 _ = no 15641 return true 15642 } 15643 // match: (GT (InvertFlags cmp) yes no) 15644 // cond: 15645 // result: (LT cmp yes no) 15646 for { 15647 v := b.Control 15648 if v.Op != OpARM64InvertFlags { 15649 break 15650 } 15651 cmp := v.Args[0] 15652 yes := b.Succs[0] 15653 no := b.Succs[1] 15654 b.Kind = BlockARM64LT 15655 b.SetControl(cmp) 15656 _ = yes 15657 _ = no 15658 return true 15659 } 15660 case BlockIf: 15661 // match: (If (Equal cc) yes no) 15662 // cond: 15663 // result: (EQ cc yes no) 15664 for { 15665 v := b.Control 15666 if v.Op != OpARM64Equal { 15667 break 15668 } 15669 cc := v.Args[0] 15670 yes := b.Succs[0] 15671 no := b.Succs[1] 15672 b.Kind = BlockARM64EQ 15673 b.SetControl(cc) 15674 _ = yes 15675 _ = no 15676 return true 15677 } 15678 // match: (If (NotEqual cc) yes no) 15679 // cond: 15680 // result: (NE cc yes no) 15681 for { 15682 v := b.Control 15683 if v.Op != OpARM64NotEqual { 15684 break 15685 } 15686 cc := v.Args[0] 15687 yes := b.Succs[0] 15688 no := b.Succs[1] 15689 b.Kind = BlockARM64NE 15690 b.SetControl(cc) 15691 _ = yes 15692 _ = no 15693 return true 15694 } 15695 // match: (If (LessThan cc) yes no) 15696 // cond: 15697 // result: (LT cc yes no) 15698 for { 15699 v := b.Control 15700 if v.Op != OpARM64LessThan { 15701 break 15702 } 15703 cc := v.Args[0] 15704 yes := b.Succs[0] 15705 no := b.Succs[1] 15706 b.Kind = BlockARM64LT 15707 b.SetControl(cc) 15708 _ = yes 15709 _ = no 15710 return true 15711 } 15712 // match: (If (LessThanU cc) yes no) 15713 // cond: 15714 // result: (ULT cc yes no) 15715 for { 15716 v := b.Control 15717 if v.Op != OpARM64LessThanU { 15718 break 15719 } 15720 cc := v.Args[0] 15721 yes := b.Succs[0] 15722 no := b.Succs[1] 15723 b.Kind = BlockARM64ULT 15724 b.SetControl(cc) 15725 _ = yes 15726 _ = no 15727 return true 15728 } 15729 // match: (If (LessEqual cc) yes no) 15730 // cond: 15731 // result: (LE cc yes no) 15732 for { 15733 v := b.Control 15734 if v.Op != OpARM64LessEqual { 15735 break 15736 } 15737 cc := v.Args[0] 15738 yes := b.Succs[0] 15739 no := b.Succs[1] 15740 b.Kind = BlockARM64LE 15741 b.SetControl(cc) 15742 _ = yes 15743 _ = no 15744 return true 15745 } 15746 // match: (If (LessEqualU cc) yes no) 15747 // cond: 15748 // result: (ULE cc yes no) 15749 for { 15750 v := b.Control 15751 if v.Op != OpARM64LessEqualU { 15752 break 15753 } 15754 cc := v.Args[0] 15755 yes := b.Succs[0] 15756 no := b.Succs[1] 15757 b.Kind = BlockARM64ULE 15758 b.SetControl(cc) 15759 _ = yes 15760 _ = no 15761 return true 15762 } 15763 // match: (If (GreaterThan cc) yes no) 15764 // cond: 15765 // result: (GT cc yes no) 15766 for { 15767 v := b.Control 15768 if v.Op != OpARM64GreaterThan { 15769 break 15770 } 15771 cc := v.Args[0] 15772 yes := b.Succs[0] 15773 no := b.Succs[1] 15774 b.Kind = BlockARM64GT 15775 b.SetControl(cc) 15776 _ = yes 15777 _ = no 15778 return true 15779 } 15780 // match: (If (GreaterThanU cc) yes no) 15781 // cond: 15782 // result: (UGT cc yes no) 15783 for { 15784 v := b.Control 15785 if v.Op != OpARM64GreaterThanU { 15786 break 15787 } 15788 cc := v.Args[0] 15789 yes := b.Succs[0] 15790 no := b.Succs[1] 15791 b.Kind = BlockARM64UGT 15792 b.SetControl(cc) 15793 _ = yes 15794 _ = no 15795 return true 15796 } 15797 // match: (If (GreaterEqual cc) yes no) 15798 // cond: 15799 // result: (GE cc yes no) 15800 for { 15801 v := b.Control 15802 if v.Op != OpARM64GreaterEqual { 15803 break 15804 } 15805 cc := v.Args[0] 15806 yes := b.Succs[0] 15807 no := b.Succs[1] 15808 b.Kind = BlockARM64GE 15809 b.SetControl(cc) 15810 _ = yes 15811 _ = no 15812 return true 15813 } 15814 // match: (If (GreaterEqualU cc) yes no) 15815 // cond: 15816 // result: (UGE cc yes no) 15817 for { 15818 v := b.Control 15819 if v.Op != OpARM64GreaterEqualU { 15820 break 15821 } 15822 cc := v.Args[0] 15823 yes := b.Succs[0] 15824 no := b.Succs[1] 15825 b.Kind = BlockARM64UGE 15826 b.SetControl(cc) 15827 _ = yes 15828 _ = no 15829 return true 15830 } 15831 // match: (If cond yes no) 15832 // cond: 15833 // result: (NZ cond yes no) 15834 for { 15835 v := b.Control 15836 _ = v 15837 cond := b.Control 15838 yes := b.Succs[0] 15839 no := b.Succs[1] 15840 b.Kind = BlockARM64NZ 15841 b.SetControl(cond) 15842 _ = yes 15843 _ = no 15844 return true 15845 } 15846 case BlockARM64LE: 15847 // match: (LE (FlagEQ) yes no) 15848 // cond: 15849 // result: (First nil yes no) 15850 for { 15851 v := b.Control 15852 if v.Op != OpARM64FlagEQ { 15853 break 15854 } 15855 yes := b.Succs[0] 15856 no := b.Succs[1] 15857 b.Kind = BlockFirst 15858 b.SetControl(nil) 15859 _ = yes 15860 _ = no 15861 return true 15862 } 15863 // match: (LE (FlagLT_ULT) yes no) 15864 // cond: 15865 // result: (First nil yes no) 15866 for { 15867 v := b.Control 15868 if v.Op != OpARM64FlagLT_ULT { 15869 break 15870 } 15871 yes := b.Succs[0] 15872 no := b.Succs[1] 15873 b.Kind = BlockFirst 15874 b.SetControl(nil) 15875 _ = yes 15876 _ = no 15877 return true 15878 } 15879 // match: (LE (FlagLT_UGT) yes no) 15880 // cond: 15881 // result: (First nil yes no) 15882 for { 15883 v := b.Control 15884 if v.Op != OpARM64FlagLT_UGT { 15885 break 15886 } 15887 yes := b.Succs[0] 15888 no := b.Succs[1] 15889 b.Kind = BlockFirst 15890 b.SetControl(nil) 15891 _ = yes 15892 _ = no 15893 return true 15894 } 15895 // match: (LE (FlagGT_ULT) yes no) 15896 // cond: 15897 // result: (First nil no yes) 15898 for { 15899 v := b.Control 15900 if v.Op != OpARM64FlagGT_ULT { 15901 break 15902 } 15903 yes := b.Succs[0] 15904 no := b.Succs[1] 15905 b.Kind = BlockFirst 15906 b.SetControl(nil) 15907 b.swapSuccessors() 15908 _ = no 15909 _ = yes 15910 return true 15911 } 15912 // match: (LE (FlagGT_UGT) yes no) 15913 // cond: 15914 // result: (First nil no yes) 15915 for { 15916 v := b.Control 15917 if v.Op != OpARM64FlagGT_UGT { 15918 break 15919 } 15920 yes := b.Succs[0] 15921 no := b.Succs[1] 15922 b.Kind = BlockFirst 15923 b.SetControl(nil) 15924 b.swapSuccessors() 15925 _ = no 15926 _ = yes 15927 return true 15928 } 15929 // match: (LE (InvertFlags cmp) yes no) 15930 // cond: 15931 // result: (GE cmp yes no) 15932 for { 15933 v := b.Control 15934 if v.Op != OpARM64InvertFlags { 15935 break 15936 } 15937 cmp := v.Args[0] 15938 yes := b.Succs[0] 15939 no := b.Succs[1] 15940 b.Kind = BlockARM64GE 15941 b.SetControl(cmp) 15942 _ = yes 15943 _ = no 15944 return true 15945 } 15946 case BlockARM64LT: 15947 // match: (LT (FlagEQ) yes no) 15948 // cond: 15949 // result: (First nil no yes) 15950 for { 15951 v := b.Control 15952 if v.Op != OpARM64FlagEQ { 15953 break 15954 } 15955 yes := b.Succs[0] 15956 no := b.Succs[1] 15957 b.Kind = BlockFirst 15958 b.SetControl(nil) 15959 b.swapSuccessors() 15960 _ = no 15961 _ = yes 15962 return true 15963 } 15964 // match: (LT (FlagLT_ULT) yes no) 15965 // cond: 15966 // result: (First nil yes no) 15967 for { 15968 v := b.Control 15969 if v.Op != OpARM64FlagLT_ULT { 15970 break 15971 } 15972 yes := b.Succs[0] 15973 no := b.Succs[1] 15974 b.Kind = BlockFirst 15975 b.SetControl(nil) 15976 _ = yes 15977 _ = no 15978 return true 15979 } 15980 // match: (LT (FlagLT_UGT) yes no) 15981 // cond: 15982 // result: (First nil yes no) 15983 for { 15984 v := b.Control 15985 if v.Op != OpARM64FlagLT_UGT { 15986 break 15987 } 15988 yes := b.Succs[0] 15989 no := b.Succs[1] 15990 b.Kind = BlockFirst 15991 b.SetControl(nil) 15992 _ = yes 15993 _ = no 15994 return true 15995 } 15996 // match: (LT (FlagGT_ULT) yes no) 15997 // cond: 15998 // result: (First nil no yes) 15999 for { 16000 v := b.Control 16001 if v.Op != OpARM64FlagGT_ULT { 16002 break 16003 } 16004 yes := b.Succs[0] 16005 no := b.Succs[1] 16006 b.Kind = BlockFirst 16007 b.SetControl(nil) 16008 b.swapSuccessors() 16009 _ = no 16010 _ = yes 16011 return true 16012 } 16013 // match: (LT (FlagGT_UGT) yes no) 16014 // cond: 16015 // result: (First nil no yes) 16016 for { 16017 v := b.Control 16018 if v.Op != OpARM64FlagGT_UGT { 16019 break 16020 } 16021 yes := b.Succs[0] 16022 no := b.Succs[1] 16023 b.Kind = BlockFirst 16024 b.SetControl(nil) 16025 b.swapSuccessors() 16026 _ = no 16027 _ = yes 16028 return true 16029 } 16030 // match: (LT (InvertFlags cmp) yes no) 16031 // cond: 16032 // result: (GT cmp yes no) 16033 for { 16034 v := b.Control 16035 if v.Op != OpARM64InvertFlags { 16036 break 16037 } 16038 cmp := v.Args[0] 16039 yes := b.Succs[0] 16040 no := b.Succs[1] 16041 b.Kind = BlockARM64GT 16042 b.SetControl(cmp) 16043 _ = yes 16044 _ = no 16045 return true 16046 } 16047 case BlockARM64NE: 16048 // match: (NE (CMPconst [0] x) yes no) 16049 // cond: 16050 // result: (NZ x yes no) 16051 for { 16052 v := b.Control 16053 if v.Op != OpARM64CMPconst { 16054 break 16055 } 16056 if v.AuxInt != 0 { 16057 break 16058 } 16059 x := v.Args[0] 16060 yes := b.Succs[0] 16061 no := b.Succs[1] 16062 b.Kind = BlockARM64NZ 16063 b.SetControl(x) 16064 _ = yes 16065 _ = no 16066 return true 16067 } 16068 // match: (NE (CMPWconst [0] x) yes no) 16069 // cond: 16070 // result: (NZW x yes no) 16071 for { 16072 v := b.Control 16073 if v.Op != OpARM64CMPWconst { 16074 break 16075 } 16076 if v.AuxInt != 0 { 16077 break 16078 } 16079 x := v.Args[0] 16080 yes := b.Succs[0] 16081 no := b.Succs[1] 16082 b.Kind = BlockARM64NZW 16083 b.SetControl(x) 16084 _ = yes 16085 _ = no 16086 return true 16087 } 16088 // match: (NE (FlagEQ) yes no) 16089 // cond: 16090 // result: (First nil no yes) 16091 for { 16092 v := b.Control 16093 if v.Op != OpARM64FlagEQ { 16094 break 16095 } 16096 yes := b.Succs[0] 16097 no := b.Succs[1] 16098 b.Kind = BlockFirst 16099 b.SetControl(nil) 16100 b.swapSuccessors() 16101 _ = no 16102 _ = yes 16103 return true 16104 } 16105 // match: (NE (FlagLT_ULT) yes no) 16106 // cond: 16107 // result: (First nil yes no) 16108 for { 16109 v := b.Control 16110 if v.Op != OpARM64FlagLT_ULT { 16111 break 16112 } 16113 yes := b.Succs[0] 16114 no := b.Succs[1] 16115 b.Kind = BlockFirst 16116 b.SetControl(nil) 16117 _ = yes 16118 _ = no 16119 return true 16120 } 16121 // match: (NE (FlagLT_UGT) yes no) 16122 // cond: 16123 // result: (First nil yes no) 16124 for { 16125 v := b.Control 16126 if v.Op != OpARM64FlagLT_UGT { 16127 break 16128 } 16129 yes := b.Succs[0] 16130 no := b.Succs[1] 16131 b.Kind = BlockFirst 16132 b.SetControl(nil) 16133 _ = yes 16134 _ = no 16135 return true 16136 } 16137 // match: (NE (FlagGT_ULT) yes no) 16138 // cond: 16139 // result: (First nil yes no) 16140 for { 16141 v := b.Control 16142 if v.Op != OpARM64FlagGT_ULT { 16143 break 16144 } 16145 yes := b.Succs[0] 16146 no := b.Succs[1] 16147 b.Kind = BlockFirst 16148 b.SetControl(nil) 16149 _ = yes 16150 _ = no 16151 return true 16152 } 16153 // match: (NE (FlagGT_UGT) yes no) 16154 // cond: 16155 // result: (First nil yes no) 16156 for { 16157 v := b.Control 16158 if v.Op != OpARM64FlagGT_UGT { 16159 break 16160 } 16161 yes := b.Succs[0] 16162 no := b.Succs[1] 16163 b.Kind = BlockFirst 16164 b.SetControl(nil) 16165 _ = yes 16166 _ = no 16167 return true 16168 } 16169 // match: (NE (InvertFlags cmp) yes no) 16170 // cond: 16171 // result: (NE cmp yes no) 16172 for { 16173 v := b.Control 16174 if v.Op != OpARM64InvertFlags { 16175 break 16176 } 16177 cmp := v.Args[0] 16178 yes := b.Succs[0] 16179 no := b.Succs[1] 16180 b.Kind = BlockARM64NE 16181 b.SetControl(cmp) 16182 _ = yes 16183 _ = no 16184 return true 16185 } 16186 case BlockARM64NZ: 16187 // match: (NZ (Equal cc) yes no) 16188 // cond: 16189 // result: (EQ cc yes no) 16190 for { 16191 v := b.Control 16192 if v.Op != OpARM64Equal { 16193 break 16194 } 16195 cc := v.Args[0] 16196 yes := b.Succs[0] 16197 no := b.Succs[1] 16198 b.Kind = BlockARM64EQ 16199 b.SetControl(cc) 16200 _ = yes 16201 _ = no 16202 return true 16203 } 16204 // match: (NZ (NotEqual cc) yes no) 16205 // cond: 16206 // result: (NE cc yes no) 16207 for { 16208 v := b.Control 16209 if v.Op != OpARM64NotEqual { 16210 break 16211 } 16212 cc := v.Args[0] 16213 yes := b.Succs[0] 16214 no := b.Succs[1] 16215 b.Kind = BlockARM64NE 16216 b.SetControl(cc) 16217 _ = yes 16218 _ = no 16219 return true 16220 } 16221 // match: (NZ (LessThan cc) yes no) 16222 // cond: 16223 // result: (LT cc yes no) 16224 for { 16225 v := b.Control 16226 if v.Op != OpARM64LessThan { 16227 break 16228 } 16229 cc := v.Args[0] 16230 yes := b.Succs[0] 16231 no := b.Succs[1] 16232 b.Kind = BlockARM64LT 16233 b.SetControl(cc) 16234 _ = yes 16235 _ = no 16236 return true 16237 } 16238 // match: (NZ (LessThanU cc) yes no) 16239 // cond: 16240 // result: (ULT cc yes no) 16241 for { 16242 v := b.Control 16243 if v.Op != OpARM64LessThanU { 16244 break 16245 } 16246 cc := v.Args[0] 16247 yes := b.Succs[0] 16248 no := b.Succs[1] 16249 b.Kind = BlockARM64ULT 16250 b.SetControl(cc) 16251 _ = yes 16252 _ = no 16253 return true 16254 } 16255 // match: (NZ (LessEqual cc) yes no) 16256 // cond: 16257 // result: (LE cc yes no) 16258 for { 16259 v := b.Control 16260 if v.Op != OpARM64LessEqual { 16261 break 16262 } 16263 cc := v.Args[0] 16264 yes := b.Succs[0] 16265 no := b.Succs[1] 16266 b.Kind = BlockARM64LE 16267 b.SetControl(cc) 16268 _ = yes 16269 _ = no 16270 return true 16271 } 16272 // match: (NZ (LessEqualU cc) yes no) 16273 // cond: 16274 // result: (ULE cc yes no) 16275 for { 16276 v := b.Control 16277 if v.Op != OpARM64LessEqualU { 16278 break 16279 } 16280 cc := v.Args[0] 16281 yes := b.Succs[0] 16282 no := b.Succs[1] 16283 b.Kind = BlockARM64ULE 16284 b.SetControl(cc) 16285 _ = yes 16286 _ = no 16287 return true 16288 } 16289 // match: (NZ (GreaterThan cc) yes no) 16290 // cond: 16291 // result: (GT cc yes no) 16292 for { 16293 v := b.Control 16294 if v.Op != OpARM64GreaterThan { 16295 break 16296 } 16297 cc := v.Args[0] 16298 yes := b.Succs[0] 16299 no := b.Succs[1] 16300 b.Kind = BlockARM64GT 16301 b.SetControl(cc) 16302 _ = yes 16303 _ = no 16304 return true 16305 } 16306 // match: (NZ (GreaterThanU cc) yes no) 16307 // cond: 16308 // result: (UGT cc yes no) 16309 for { 16310 v := b.Control 16311 if v.Op != OpARM64GreaterThanU { 16312 break 16313 } 16314 cc := v.Args[0] 16315 yes := b.Succs[0] 16316 no := b.Succs[1] 16317 b.Kind = BlockARM64UGT 16318 b.SetControl(cc) 16319 _ = yes 16320 _ = no 16321 return true 16322 } 16323 // match: (NZ (GreaterEqual cc) yes no) 16324 // cond: 16325 // result: (GE cc yes no) 16326 for { 16327 v := b.Control 16328 if v.Op != OpARM64GreaterEqual { 16329 break 16330 } 16331 cc := v.Args[0] 16332 yes := b.Succs[0] 16333 no := b.Succs[1] 16334 b.Kind = BlockARM64GE 16335 b.SetControl(cc) 16336 _ = yes 16337 _ = no 16338 return true 16339 } 16340 // match: (NZ (GreaterEqualU cc) yes no) 16341 // cond: 16342 // result: (UGE cc yes no) 16343 for { 16344 v := b.Control 16345 if v.Op != OpARM64GreaterEqualU { 16346 break 16347 } 16348 cc := v.Args[0] 16349 yes := b.Succs[0] 16350 no := b.Succs[1] 16351 b.Kind = BlockARM64UGE 16352 b.SetControl(cc) 16353 _ = yes 16354 _ = no 16355 return true 16356 } 16357 // match: (NZ (MOVDconst [0]) yes no) 16358 // cond: 16359 // result: (First nil no yes) 16360 for { 16361 v := b.Control 16362 if v.Op != OpARM64MOVDconst { 16363 break 16364 } 16365 if v.AuxInt != 0 { 16366 break 16367 } 16368 yes := b.Succs[0] 16369 no := b.Succs[1] 16370 b.Kind = BlockFirst 16371 b.SetControl(nil) 16372 b.swapSuccessors() 16373 _ = no 16374 _ = yes 16375 return true 16376 } 16377 // match: (NZ (MOVDconst [c]) yes no) 16378 // cond: c != 0 16379 // result: (First nil yes no) 16380 for { 16381 v := b.Control 16382 if v.Op != OpARM64MOVDconst { 16383 break 16384 } 16385 c := v.AuxInt 16386 yes := b.Succs[0] 16387 no := b.Succs[1] 16388 if !(c != 0) { 16389 break 16390 } 16391 b.Kind = BlockFirst 16392 b.SetControl(nil) 16393 _ = yes 16394 _ = no 16395 return true 16396 } 16397 case BlockARM64NZW: 16398 // match: (NZW (MOVDconst [c]) yes no) 16399 // cond: int32(c) == 0 16400 // result: (First nil no yes) 16401 for { 16402 v := b.Control 16403 if v.Op != OpARM64MOVDconst { 16404 break 16405 } 16406 c := v.AuxInt 16407 yes := b.Succs[0] 16408 no := b.Succs[1] 16409 if !(int32(c) == 0) { 16410 break 16411 } 16412 b.Kind = BlockFirst 16413 b.SetControl(nil) 16414 b.swapSuccessors() 16415 _ = no 16416 _ = yes 16417 return true 16418 } 16419 // match: (NZW (MOVDconst [c]) yes no) 16420 // cond: int32(c) != 0 16421 // result: (First nil yes no) 16422 for { 16423 v := b.Control 16424 if v.Op != OpARM64MOVDconst { 16425 break 16426 } 16427 c := v.AuxInt 16428 yes := b.Succs[0] 16429 no := b.Succs[1] 16430 if !(int32(c) != 0) { 16431 break 16432 } 16433 b.Kind = BlockFirst 16434 b.SetControl(nil) 16435 _ = yes 16436 _ = no 16437 return true 16438 } 16439 case BlockARM64UGE: 16440 // match: (UGE (FlagEQ) yes no) 16441 // cond: 16442 // result: (First nil yes no) 16443 for { 16444 v := b.Control 16445 if v.Op != OpARM64FlagEQ { 16446 break 16447 } 16448 yes := b.Succs[0] 16449 no := b.Succs[1] 16450 b.Kind = BlockFirst 16451 b.SetControl(nil) 16452 _ = yes 16453 _ = no 16454 return true 16455 } 16456 // match: (UGE (FlagLT_ULT) yes no) 16457 // cond: 16458 // result: (First nil no yes) 16459 for { 16460 v := b.Control 16461 if v.Op != OpARM64FlagLT_ULT { 16462 break 16463 } 16464 yes := b.Succs[0] 16465 no := b.Succs[1] 16466 b.Kind = BlockFirst 16467 b.SetControl(nil) 16468 b.swapSuccessors() 16469 _ = no 16470 _ = yes 16471 return true 16472 } 16473 // match: (UGE (FlagLT_UGT) yes no) 16474 // cond: 16475 // result: (First nil yes no) 16476 for { 16477 v := b.Control 16478 if v.Op != OpARM64FlagLT_UGT { 16479 break 16480 } 16481 yes := b.Succs[0] 16482 no := b.Succs[1] 16483 b.Kind = BlockFirst 16484 b.SetControl(nil) 16485 _ = yes 16486 _ = no 16487 return true 16488 } 16489 // match: (UGE (FlagGT_ULT) yes no) 16490 // cond: 16491 // result: (First nil no yes) 16492 for { 16493 v := b.Control 16494 if v.Op != OpARM64FlagGT_ULT { 16495 break 16496 } 16497 yes := b.Succs[0] 16498 no := b.Succs[1] 16499 b.Kind = BlockFirst 16500 b.SetControl(nil) 16501 b.swapSuccessors() 16502 _ = no 16503 _ = yes 16504 return true 16505 } 16506 // match: (UGE (FlagGT_UGT) yes no) 16507 // cond: 16508 // result: (First nil yes no) 16509 for { 16510 v := b.Control 16511 if v.Op != OpARM64FlagGT_UGT { 16512 break 16513 } 16514 yes := b.Succs[0] 16515 no := b.Succs[1] 16516 b.Kind = BlockFirst 16517 b.SetControl(nil) 16518 _ = yes 16519 _ = no 16520 return true 16521 } 16522 // match: (UGE (InvertFlags cmp) yes no) 16523 // cond: 16524 // result: (ULE cmp yes no) 16525 for { 16526 v := b.Control 16527 if v.Op != OpARM64InvertFlags { 16528 break 16529 } 16530 cmp := v.Args[0] 16531 yes := b.Succs[0] 16532 no := b.Succs[1] 16533 b.Kind = BlockARM64ULE 16534 b.SetControl(cmp) 16535 _ = yes 16536 _ = no 16537 return true 16538 } 16539 case BlockARM64UGT: 16540 // match: (UGT (FlagEQ) yes no) 16541 // cond: 16542 // result: (First nil no yes) 16543 for { 16544 v := b.Control 16545 if v.Op != OpARM64FlagEQ { 16546 break 16547 } 16548 yes := b.Succs[0] 16549 no := b.Succs[1] 16550 b.Kind = BlockFirst 16551 b.SetControl(nil) 16552 b.swapSuccessors() 16553 _ = no 16554 _ = yes 16555 return true 16556 } 16557 // match: (UGT (FlagLT_ULT) yes no) 16558 // cond: 16559 // result: (First nil no yes) 16560 for { 16561 v := b.Control 16562 if v.Op != OpARM64FlagLT_ULT { 16563 break 16564 } 16565 yes := b.Succs[0] 16566 no := b.Succs[1] 16567 b.Kind = BlockFirst 16568 b.SetControl(nil) 16569 b.swapSuccessors() 16570 _ = no 16571 _ = yes 16572 return true 16573 } 16574 // match: (UGT (FlagLT_UGT) yes no) 16575 // cond: 16576 // result: (First nil yes no) 16577 for { 16578 v := b.Control 16579 if v.Op != OpARM64FlagLT_UGT { 16580 break 16581 } 16582 yes := b.Succs[0] 16583 no := b.Succs[1] 16584 b.Kind = BlockFirst 16585 b.SetControl(nil) 16586 _ = yes 16587 _ = no 16588 return true 16589 } 16590 // match: (UGT (FlagGT_ULT) yes no) 16591 // cond: 16592 // result: (First nil no yes) 16593 for { 16594 v := b.Control 16595 if v.Op != OpARM64FlagGT_ULT { 16596 break 16597 } 16598 yes := b.Succs[0] 16599 no := b.Succs[1] 16600 b.Kind = BlockFirst 16601 b.SetControl(nil) 16602 b.swapSuccessors() 16603 _ = no 16604 _ = yes 16605 return true 16606 } 16607 // match: (UGT (FlagGT_UGT) yes no) 16608 // cond: 16609 // result: (First nil yes no) 16610 for { 16611 v := b.Control 16612 if v.Op != OpARM64FlagGT_UGT { 16613 break 16614 } 16615 yes := b.Succs[0] 16616 no := b.Succs[1] 16617 b.Kind = BlockFirst 16618 b.SetControl(nil) 16619 _ = yes 16620 _ = no 16621 return true 16622 } 16623 // match: (UGT (InvertFlags cmp) yes no) 16624 // cond: 16625 // result: (ULT cmp yes no) 16626 for { 16627 v := b.Control 16628 if v.Op != OpARM64InvertFlags { 16629 break 16630 } 16631 cmp := v.Args[0] 16632 yes := b.Succs[0] 16633 no := b.Succs[1] 16634 b.Kind = BlockARM64ULT 16635 b.SetControl(cmp) 16636 _ = yes 16637 _ = no 16638 return true 16639 } 16640 case BlockARM64ULE: 16641 // match: (ULE (FlagEQ) yes no) 16642 // cond: 16643 // result: (First nil yes no) 16644 for { 16645 v := b.Control 16646 if v.Op != OpARM64FlagEQ { 16647 break 16648 } 16649 yes := b.Succs[0] 16650 no := b.Succs[1] 16651 b.Kind = BlockFirst 16652 b.SetControl(nil) 16653 _ = yes 16654 _ = no 16655 return true 16656 } 16657 // match: (ULE (FlagLT_ULT) yes no) 16658 // cond: 16659 // result: (First nil yes no) 16660 for { 16661 v := b.Control 16662 if v.Op != OpARM64FlagLT_ULT { 16663 break 16664 } 16665 yes := b.Succs[0] 16666 no := b.Succs[1] 16667 b.Kind = BlockFirst 16668 b.SetControl(nil) 16669 _ = yes 16670 _ = no 16671 return true 16672 } 16673 // match: (ULE (FlagLT_UGT) yes no) 16674 // cond: 16675 // result: (First nil no yes) 16676 for { 16677 v := b.Control 16678 if v.Op != OpARM64FlagLT_UGT { 16679 break 16680 } 16681 yes := b.Succs[0] 16682 no := b.Succs[1] 16683 b.Kind = BlockFirst 16684 b.SetControl(nil) 16685 b.swapSuccessors() 16686 _ = no 16687 _ = yes 16688 return true 16689 } 16690 // match: (ULE (FlagGT_ULT) yes no) 16691 // cond: 16692 // result: (First nil yes no) 16693 for { 16694 v := b.Control 16695 if v.Op != OpARM64FlagGT_ULT { 16696 break 16697 } 16698 yes := b.Succs[0] 16699 no := b.Succs[1] 16700 b.Kind = BlockFirst 16701 b.SetControl(nil) 16702 _ = yes 16703 _ = no 16704 return true 16705 } 16706 // match: (ULE (FlagGT_UGT) yes no) 16707 // cond: 16708 // result: (First nil no yes) 16709 for { 16710 v := b.Control 16711 if v.Op != OpARM64FlagGT_UGT { 16712 break 16713 } 16714 yes := b.Succs[0] 16715 no := b.Succs[1] 16716 b.Kind = BlockFirst 16717 b.SetControl(nil) 16718 b.swapSuccessors() 16719 _ = no 16720 _ = yes 16721 return true 16722 } 16723 // match: (ULE (InvertFlags cmp) yes no) 16724 // cond: 16725 // result: (UGE cmp yes no) 16726 for { 16727 v := b.Control 16728 if v.Op != OpARM64InvertFlags { 16729 break 16730 } 16731 cmp := v.Args[0] 16732 yes := b.Succs[0] 16733 no := b.Succs[1] 16734 b.Kind = BlockARM64UGE 16735 b.SetControl(cmp) 16736 _ = yes 16737 _ = no 16738 return true 16739 } 16740 case BlockARM64ULT: 16741 // match: (ULT (FlagEQ) yes no) 16742 // cond: 16743 // result: (First nil no yes) 16744 for { 16745 v := b.Control 16746 if v.Op != OpARM64FlagEQ { 16747 break 16748 } 16749 yes := b.Succs[0] 16750 no := b.Succs[1] 16751 b.Kind = BlockFirst 16752 b.SetControl(nil) 16753 b.swapSuccessors() 16754 _ = no 16755 _ = yes 16756 return true 16757 } 16758 // match: (ULT (FlagLT_ULT) yes no) 16759 // cond: 16760 // result: (First nil yes no) 16761 for { 16762 v := b.Control 16763 if v.Op != OpARM64FlagLT_ULT { 16764 break 16765 } 16766 yes := b.Succs[0] 16767 no := b.Succs[1] 16768 b.Kind = BlockFirst 16769 b.SetControl(nil) 16770 _ = yes 16771 _ = no 16772 return true 16773 } 16774 // match: (ULT (FlagLT_UGT) yes no) 16775 // cond: 16776 // result: (First nil no yes) 16777 for { 16778 v := b.Control 16779 if v.Op != OpARM64FlagLT_UGT { 16780 break 16781 } 16782 yes := b.Succs[0] 16783 no := b.Succs[1] 16784 b.Kind = BlockFirst 16785 b.SetControl(nil) 16786 b.swapSuccessors() 16787 _ = no 16788 _ = yes 16789 return true 16790 } 16791 // match: (ULT (FlagGT_ULT) yes no) 16792 // cond: 16793 // result: (First nil yes no) 16794 for { 16795 v := b.Control 16796 if v.Op != OpARM64FlagGT_ULT { 16797 break 16798 } 16799 yes := b.Succs[0] 16800 no := b.Succs[1] 16801 b.Kind = BlockFirst 16802 b.SetControl(nil) 16803 _ = yes 16804 _ = no 16805 return true 16806 } 16807 // match: (ULT (FlagGT_UGT) yes no) 16808 // cond: 16809 // result: (First nil no yes) 16810 for { 16811 v := b.Control 16812 if v.Op != OpARM64FlagGT_UGT { 16813 break 16814 } 16815 yes := b.Succs[0] 16816 no := b.Succs[1] 16817 b.Kind = BlockFirst 16818 b.SetControl(nil) 16819 b.swapSuccessors() 16820 _ = no 16821 _ = yes 16822 return true 16823 } 16824 // match: (ULT (InvertFlags cmp) yes no) 16825 // cond: 16826 // result: (UGT cmp yes no) 16827 for { 16828 v := b.Control 16829 if v.Op != OpARM64InvertFlags { 16830 break 16831 } 16832 cmp := v.Args[0] 16833 yes := b.Succs[0] 16834 no := b.Succs[1] 16835 b.Kind = BlockARM64UGT 16836 b.SetControl(cmp) 16837 _ = yes 16838 _ = no 16839 return true 16840 } 16841 case BlockARM64Z: 16842 // match: (Z (MOVDconst [0]) yes no) 16843 // cond: 16844 // result: (First nil yes no) 16845 for { 16846 v := b.Control 16847 if v.Op != OpARM64MOVDconst { 16848 break 16849 } 16850 if v.AuxInt != 0 { 16851 break 16852 } 16853 yes := b.Succs[0] 16854 no := b.Succs[1] 16855 b.Kind = BlockFirst 16856 b.SetControl(nil) 16857 _ = yes 16858 _ = no 16859 return true 16860 } 16861 // match: (Z (MOVDconst [c]) yes no) 16862 // cond: c != 0 16863 // result: (First nil no yes) 16864 for { 16865 v := b.Control 16866 if v.Op != OpARM64MOVDconst { 16867 break 16868 } 16869 c := v.AuxInt 16870 yes := b.Succs[0] 16871 no := b.Succs[1] 16872 if !(c != 0) { 16873 break 16874 } 16875 b.Kind = BlockFirst 16876 b.SetControl(nil) 16877 b.swapSuccessors() 16878 _ = no 16879 _ = yes 16880 return true 16881 } 16882 case BlockARM64ZW: 16883 // match: (ZW (MOVDconst [c]) yes no) 16884 // cond: int32(c) == 0 16885 // result: (First nil yes no) 16886 for { 16887 v := b.Control 16888 if v.Op != OpARM64MOVDconst { 16889 break 16890 } 16891 c := v.AuxInt 16892 yes := b.Succs[0] 16893 no := b.Succs[1] 16894 if !(int32(c) == 0) { 16895 break 16896 } 16897 b.Kind = BlockFirst 16898 b.SetControl(nil) 16899 _ = yes 16900 _ = no 16901 return true 16902 } 16903 // match: (ZW (MOVDconst [c]) yes no) 16904 // cond: int32(c) != 0 16905 // result: (First nil no yes) 16906 for { 16907 v := b.Control 16908 if v.Op != OpARM64MOVDconst { 16909 break 16910 } 16911 c := v.AuxInt 16912 yes := b.Succs[0] 16913 no := b.Succs[1] 16914 if !(int32(c) != 0) { 16915 break 16916 } 16917 b.Kind = BlockFirst 16918 b.SetControl(nil) 16919 b.swapSuccessors() 16920 _ = no 16921 _ = yes 16922 return true 16923 } 16924 } 16925 return false 16926 }