github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 import "cmd/internal/obj" 8 import "cmd/internal/objabi" 9 10 var _ = math.MinInt8 // in case not otherwise used 11 var _ = obj.ANOP // in case not otherwise used 12 var _ = objabi.GOROOT // in case not otherwise used 13 14 func rewriteValueARM64(v *Value) bool { 15 switch v.Op { 16 case OpARM64ADD: 17 return rewriteValueARM64_OpARM64ADD_0(v) 18 case OpARM64ADDconst: 19 return rewriteValueARM64_OpARM64ADDconst_0(v) 20 case OpARM64ADDshiftLL: 21 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 22 case OpARM64ADDshiftRA: 23 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 24 case OpARM64ADDshiftRL: 25 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 26 case OpARM64AND: 27 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 28 case OpARM64ANDconst: 29 return rewriteValueARM64_OpARM64ANDconst_0(v) 30 case OpARM64ANDshiftLL: 31 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 32 case OpARM64ANDshiftRA: 33 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 34 case OpARM64ANDshiftRL: 35 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 36 case OpARM64BIC: 37 return rewriteValueARM64_OpARM64BIC_0(v) 38 case OpARM64BICconst: 39 return rewriteValueARM64_OpARM64BICconst_0(v) 40 case OpARM64BICshiftLL: 41 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 42 case OpARM64BICshiftRA: 43 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 44 case OpARM64BICshiftRL: 45 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 46 case OpARM64CMP: 47 return rewriteValueARM64_OpARM64CMP_0(v) 48 case OpARM64CMPW: 49 return rewriteValueARM64_OpARM64CMPW_0(v) 50 case OpARM64CMPWconst: 51 return rewriteValueARM64_OpARM64CMPWconst_0(v) 52 case OpARM64CMPconst: 53 return rewriteValueARM64_OpARM64CMPconst_0(v) 54 case OpARM64CMPshiftLL: 55 return 
rewriteValueARM64_OpARM64CMPshiftLL_0(v) 56 case OpARM64CMPshiftRA: 57 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 58 case OpARM64CMPshiftRL: 59 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 60 case OpARM64CSELULT: 61 return rewriteValueARM64_OpARM64CSELULT_0(v) 62 case OpARM64CSELULT0: 63 return rewriteValueARM64_OpARM64CSELULT0_0(v) 64 case OpARM64DIV: 65 return rewriteValueARM64_OpARM64DIV_0(v) 66 case OpARM64DIVW: 67 return rewriteValueARM64_OpARM64DIVW_0(v) 68 case OpARM64Equal: 69 return rewriteValueARM64_OpARM64Equal_0(v) 70 case OpARM64FMOVDload: 71 return rewriteValueARM64_OpARM64FMOVDload_0(v) 72 case OpARM64FMOVDstore: 73 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 74 case OpARM64FMOVSload: 75 return rewriteValueARM64_OpARM64FMOVSload_0(v) 76 case OpARM64FMOVSstore: 77 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 78 case OpARM64GreaterEqual: 79 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 80 case OpARM64GreaterEqualU: 81 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 82 case OpARM64GreaterThan: 83 return rewriteValueARM64_OpARM64GreaterThan_0(v) 84 case OpARM64GreaterThanU: 85 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 86 case OpARM64LessEqual: 87 return rewriteValueARM64_OpARM64LessEqual_0(v) 88 case OpARM64LessEqualU: 89 return rewriteValueARM64_OpARM64LessEqualU_0(v) 90 case OpARM64LessThan: 91 return rewriteValueARM64_OpARM64LessThan_0(v) 92 case OpARM64LessThanU: 93 return rewriteValueARM64_OpARM64LessThanU_0(v) 94 case OpARM64MOD: 95 return rewriteValueARM64_OpARM64MOD_0(v) 96 case OpARM64MODW: 97 return rewriteValueARM64_OpARM64MODW_0(v) 98 case OpARM64MOVBUload: 99 return rewriteValueARM64_OpARM64MOVBUload_0(v) 100 case OpARM64MOVBUreg: 101 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 102 case OpARM64MOVBload: 103 return rewriteValueARM64_OpARM64MOVBload_0(v) 104 case OpARM64MOVBreg: 105 return rewriteValueARM64_OpARM64MOVBreg_0(v) 106 case OpARM64MOVBstore: 107 return rewriteValueARM64_OpARM64MOVBstore_0(v) 108 case 
OpARM64MOVBstorezero: 109 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 110 case OpARM64MOVDload: 111 return rewriteValueARM64_OpARM64MOVDload_0(v) 112 case OpARM64MOVDreg: 113 return rewriteValueARM64_OpARM64MOVDreg_0(v) 114 case OpARM64MOVDstore: 115 return rewriteValueARM64_OpARM64MOVDstore_0(v) 116 case OpARM64MOVDstorezero: 117 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 118 case OpARM64MOVHUload: 119 return rewriteValueARM64_OpARM64MOVHUload_0(v) 120 case OpARM64MOVHUreg: 121 return rewriteValueARM64_OpARM64MOVHUreg_0(v) 122 case OpARM64MOVHload: 123 return rewriteValueARM64_OpARM64MOVHload_0(v) 124 case OpARM64MOVHreg: 125 return rewriteValueARM64_OpARM64MOVHreg_0(v) 126 case OpARM64MOVHstore: 127 return rewriteValueARM64_OpARM64MOVHstore_0(v) 128 case OpARM64MOVHstorezero: 129 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 130 case OpARM64MOVWUload: 131 return rewriteValueARM64_OpARM64MOVWUload_0(v) 132 case OpARM64MOVWUreg: 133 return rewriteValueARM64_OpARM64MOVWUreg_0(v) 134 case OpARM64MOVWload: 135 return rewriteValueARM64_OpARM64MOVWload_0(v) 136 case OpARM64MOVWreg: 137 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 138 case OpARM64MOVWstore: 139 return rewriteValueARM64_OpARM64MOVWstore_0(v) 140 case OpARM64MOVWstorezero: 141 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 142 case OpARM64MUL: 143 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 144 case OpARM64MULW: 145 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 146 case OpARM64MVN: 147 return rewriteValueARM64_OpARM64MVN_0(v) 148 case OpARM64NEG: 149 return rewriteValueARM64_OpARM64NEG_0(v) 150 case OpARM64NotEqual: 151 return rewriteValueARM64_OpARM64NotEqual_0(v) 152 case OpARM64OR: 153 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) 154 case OpARM64ORconst: 155 
return rewriteValueARM64_OpARM64ORconst_0(v) 156 case OpARM64ORshiftLL: 157 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) 158 case OpARM64ORshiftRA: 159 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 160 case OpARM64ORshiftRL: 161 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 162 case OpARM64SLL: 163 return rewriteValueARM64_OpARM64SLL_0(v) 164 case OpARM64SLLconst: 165 return rewriteValueARM64_OpARM64SLLconst_0(v) 166 case OpARM64SRA: 167 return rewriteValueARM64_OpARM64SRA_0(v) 168 case OpARM64SRAconst: 169 return rewriteValueARM64_OpARM64SRAconst_0(v) 170 case OpARM64SRL: 171 return rewriteValueARM64_OpARM64SRL_0(v) 172 case OpARM64SRLconst: 173 return rewriteValueARM64_OpARM64SRLconst_0(v) 174 case OpARM64SUB: 175 return rewriteValueARM64_OpARM64SUB_0(v) 176 case OpARM64SUBconst: 177 return rewriteValueARM64_OpARM64SUBconst_0(v) 178 case OpARM64SUBshiftLL: 179 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 180 case OpARM64SUBshiftRA: 181 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 182 case OpARM64SUBshiftRL: 183 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 184 case OpARM64UDIV: 185 return rewriteValueARM64_OpARM64UDIV_0(v) 186 case OpARM64UDIVW: 187 return rewriteValueARM64_OpARM64UDIVW_0(v) 188 case OpARM64UMOD: 189 return rewriteValueARM64_OpARM64UMOD_0(v) 190 case OpARM64UMODW: 191 return rewriteValueARM64_OpARM64UMODW_0(v) 192 case OpARM64XOR: 193 return rewriteValueARM64_OpARM64XOR_0(v) 194 case OpARM64XORconst: 195 return rewriteValueARM64_OpARM64XORconst_0(v) 196 case OpARM64XORshiftLL: 197 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 198 case OpARM64XORshiftRA: 199 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 200 case OpARM64XORshiftRL: 201 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 202 case OpAdd16: 203 return rewriteValueARM64_OpAdd16_0(v) 204 case OpAdd32: 205 return rewriteValueARM64_OpAdd32_0(v) 206 case OpAdd32F: 207 return rewriteValueARM64_OpAdd32F_0(v) 208 case OpAdd64: 209 return 
rewriteValueARM64_OpAdd64_0(v) 210 case OpAdd64F: 211 return rewriteValueARM64_OpAdd64F_0(v) 212 case OpAdd8: 213 return rewriteValueARM64_OpAdd8_0(v) 214 case OpAddPtr: 215 return rewriteValueARM64_OpAddPtr_0(v) 216 case OpAddr: 217 return rewriteValueARM64_OpAddr_0(v) 218 case OpAnd16: 219 return rewriteValueARM64_OpAnd16_0(v) 220 case OpAnd32: 221 return rewriteValueARM64_OpAnd32_0(v) 222 case OpAnd64: 223 return rewriteValueARM64_OpAnd64_0(v) 224 case OpAnd8: 225 return rewriteValueARM64_OpAnd8_0(v) 226 case OpAndB: 227 return rewriteValueARM64_OpAndB_0(v) 228 case OpAtomicAdd32: 229 return rewriteValueARM64_OpAtomicAdd32_0(v) 230 case OpAtomicAdd64: 231 return rewriteValueARM64_OpAtomicAdd64_0(v) 232 case OpAtomicAnd8: 233 return rewriteValueARM64_OpAtomicAnd8_0(v) 234 case OpAtomicCompareAndSwap32: 235 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 236 case OpAtomicCompareAndSwap64: 237 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 238 case OpAtomicExchange32: 239 return rewriteValueARM64_OpAtomicExchange32_0(v) 240 case OpAtomicExchange64: 241 return rewriteValueARM64_OpAtomicExchange64_0(v) 242 case OpAtomicLoad32: 243 return rewriteValueARM64_OpAtomicLoad32_0(v) 244 case OpAtomicLoad64: 245 return rewriteValueARM64_OpAtomicLoad64_0(v) 246 case OpAtomicLoadPtr: 247 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 248 case OpAtomicOr8: 249 return rewriteValueARM64_OpAtomicOr8_0(v) 250 case OpAtomicStore32: 251 return rewriteValueARM64_OpAtomicStore32_0(v) 252 case OpAtomicStore64: 253 return rewriteValueARM64_OpAtomicStore64_0(v) 254 case OpAtomicStorePtrNoWB: 255 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 256 case OpAvg64u: 257 return rewriteValueARM64_OpAvg64u_0(v) 258 case OpBitLen64: 259 return rewriteValueARM64_OpBitLen64_0(v) 260 case OpBitRev16: 261 return rewriteValueARM64_OpBitRev16_0(v) 262 case OpBitRev32: 263 return rewriteValueARM64_OpBitRev32_0(v) 264 case OpBitRev64: 265 return rewriteValueARM64_OpBitRev64_0(v) 266 case 
OpBitRev8: 267 return rewriteValueARM64_OpBitRev8_0(v) 268 case OpBswap32: 269 return rewriteValueARM64_OpBswap32_0(v) 270 case OpBswap64: 271 return rewriteValueARM64_OpBswap64_0(v) 272 case OpClosureCall: 273 return rewriteValueARM64_OpClosureCall_0(v) 274 case OpCom16: 275 return rewriteValueARM64_OpCom16_0(v) 276 case OpCom32: 277 return rewriteValueARM64_OpCom32_0(v) 278 case OpCom64: 279 return rewriteValueARM64_OpCom64_0(v) 280 case OpCom8: 281 return rewriteValueARM64_OpCom8_0(v) 282 case OpConst16: 283 return rewriteValueARM64_OpConst16_0(v) 284 case OpConst32: 285 return rewriteValueARM64_OpConst32_0(v) 286 case OpConst32F: 287 return rewriteValueARM64_OpConst32F_0(v) 288 case OpConst64: 289 return rewriteValueARM64_OpConst64_0(v) 290 case OpConst64F: 291 return rewriteValueARM64_OpConst64F_0(v) 292 case OpConst8: 293 return rewriteValueARM64_OpConst8_0(v) 294 case OpConstBool: 295 return rewriteValueARM64_OpConstBool_0(v) 296 case OpConstNil: 297 return rewriteValueARM64_OpConstNil_0(v) 298 case OpConvert: 299 return rewriteValueARM64_OpConvert_0(v) 300 case OpCtz32: 301 return rewriteValueARM64_OpCtz32_0(v) 302 case OpCtz64: 303 return rewriteValueARM64_OpCtz64_0(v) 304 case OpCvt32Fto32: 305 return rewriteValueARM64_OpCvt32Fto32_0(v) 306 case OpCvt32Fto32U: 307 return rewriteValueARM64_OpCvt32Fto32U_0(v) 308 case OpCvt32Fto64: 309 return rewriteValueARM64_OpCvt32Fto64_0(v) 310 case OpCvt32Fto64F: 311 return rewriteValueARM64_OpCvt32Fto64F_0(v) 312 case OpCvt32Fto64U: 313 return rewriteValueARM64_OpCvt32Fto64U_0(v) 314 case OpCvt32Uto32F: 315 return rewriteValueARM64_OpCvt32Uto32F_0(v) 316 case OpCvt32Uto64F: 317 return rewriteValueARM64_OpCvt32Uto64F_0(v) 318 case OpCvt32to32F: 319 return rewriteValueARM64_OpCvt32to32F_0(v) 320 case OpCvt32to64F: 321 return rewriteValueARM64_OpCvt32to64F_0(v) 322 case OpCvt64Fto32: 323 return rewriteValueARM64_OpCvt64Fto32_0(v) 324 case OpCvt64Fto32F: 325 return rewriteValueARM64_OpCvt64Fto32F_0(v) 326 case 
OpCvt64Fto32U: 327 return rewriteValueARM64_OpCvt64Fto32U_0(v) 328 case OpCvt64Fto64: 329 return rewriteValueARM64_OpCvt64Fto64_0(v) 330 case OpCvt64Fto64U: 331 return rewriteValueARM64_OpCvt64Fto64U_0(v) 332 case OpCvt64Uto32F: 333 return rewriteValueARM64_OpCvt64Uto32F_0(v) 334 case OpCvt64Uto64F: 335 return rewriteValueARM64_OpCvt64Uto64F_0(v) 336 case OpCvt64to32F: 337 return rewriteValueARM64_OpCvt64to32F_0(v) 338 case OpCvt64to64F: 339 return rewriteValueARM64_OpCvt64to64F_0(v) 340 case OpDiv16: 341 return rewriteValueARM64_OpDiv16_0(v) 342 case OpDiv16u: 343 return rewriteValueARM64_OpDiv16u_0(v) 344 case OpDiv32: 345 return rewriteValueARM64_OpDiv32_0(v) 346 case OpDiv32F: 347 return rewriteValueARM64_OpDiv32F_0(v) 348 case OpDiv32u: 349 return rewriteValueARM64_OpDiv32u_0(v) 350 case OpDiv64: 351 return rewriteValueARM64_OpDiv64_0(v) 352 case OpDiv64F: 353 return rewriteValueARM64_OpDiv64F_0(v) 354 case OpDiv64u: 355 return rewriteValueARM64_OpDiv64u_0(v) 356 case OpDiv8: 357 return rewriteValueARM64_OpDiv8_0(v) 358 case OpDiv8u: 359 return rewriteValueARM64_OpDiv8u_0(v) 360 case OpEq16: 361 return rewriteValueARM64_OpEq16_0(v) 362 case OpEq32: 363 return rewriteValueARM64_OpEq32_0(v) 364 case OpEq32F: 365 return rewriteValueARM64_OpEq32F_0(v) 366 case OpEq64: 367 return rewriteValueARM64_OpEq64_0(v) 368 case OpEq64F: 369 return rewriteValueARM64_OpEq64F_0(v) 370 case OpEq8: 371 return rewriteValueARM64_OpEq8_0(v) 372 case OpEqB: 373 return rewriteValueARM64_OpEqB_0(v) 374 case OpEqPtr: 375 return rewriteValueARM64_OpEqPtr_0(v) 376 case OpGeq16: 377 return rewriteValueARM64_OpGeq16_0(v) 378 case OpGeq16U: 379 return rewriteValueARM64_OpGeq16U_0(v) 380 case OpGeq32: 381 return rewriteValueARM64_OpGeq32_0(v) 382 case OpGeq32F: 383 return rewriteValueARM64_OpGeq32F_0(v) 384 case OpGeq32U: 385 return rewriteValueARM64_OpGeq32U_0(v) 386 case OpGeq64: 387 return rewriteValueARM64_OpGeq64_0(v) 388 case OpGeq64F: 389 return rewriteValueARM64_OpGeq64F_0(v) 390 case 
OpGeq64U: 391 return rewriteValueARM64_OpGeq64U_0(v) 392 case OpGeq8: 393 return rewriteValueARM64_OpGeq8_0(v) 394 case OpGeq8U: 395 return rewriteValueARM64_OpGeq8U_0(v) 396 case OpGetClosurePtr: 397 return rewriteValueARM64_OpGetClosurePtr_0(v) 398 case OpGreater16: 399 return rewriteValueARM64_OpGreater16_0(v) 400 case OpGreater16U: 401 return rewriteValueARM64_OpGreater16U_0(v) 402 case OpGreater32: 403 return rewriteValueARM64_OpGreater32_0(v) 404 case OpGreater32F: 405 return rewriteValueARM64_OpGreater32F_0(v) 406 case OpGreater32U: 407 return rewriteValueARM64_OpGreater32U_0(v) 408 case OpGreater64: 409 return rewriteValueARM64_OpGreater64_0(v) 410 case OpGreater64F: 411 return rewriteValueARM64_OpGreater64F_0(v) 412 case OpGreater64U: 413 return rewriteValueARM64_OpGreater64U_0(v) 414 case OpGreater8: 415 return rewriteValueARM64_OpGreater8_0(v) 416 case OpGreater8U: 417 return rewriteValueARM64_OpGreater8U_0(v) 418 case OpHmul32: 419 return rewriteValueARM64_OpHmul32_0(v) 420 case OpHmul32u: 421 return rewriteValueARM64_OpHmul32u_0(v) 422 case OpHmul64: 423 return rewriteValueARM64_OpHmul64_0(v) 424 case OpHmul64u: 425 return rewriteValueARM64_OpHmul64u_0(v) 426 case OpInterCall: 427 return rewriteValueARM64_OpInterCall_0(v) 428 case OpIsInBounds: 429 return rewriteValueARM64_OpIsInBounds_0(v) 430 case OpIsNonNil: 431 return rewriteValueARM64_OpIsNonNil_0(v) 432 case OpIsSliceInBounds: 433 return rewriteValueARM64_OpIsSliceInBounds_0(v) 434 case OpLeq16: 435 return rewriteValueARM64_OpLeq16_0(v) 436 case OpLeq16U: 437 return rewriteValueARM64_OpLeq16U_0(v) 438 case OpLeq32: 439 return rewriteValueARM64_OpLeq32_0(v) 440 case OpLeq32F: 441 return rewriteValueARM64_OpLeq32F_0(v) 442 case OpLeq32U: 443 return rewriteValueARM64_OpLeq32U_0(v) 444 case OpLeq64: 445 return rewriteValueARM64_OpLeq64_0(v) 446 case OpLeq64F: 447 return rewriteValueARM64_OpLeq64F_0(v) 448 case OpLeq64U: 449 return rewriteValueARM64_OpLeq64U_0(v) 450 case OpLeq8: 451 return 
rewriteValueARM64_OpLeq8_0(v) 452 case OpLeq8U: 453 return rewriteValueARM64_OpLeq8U_0(v) 454 case OpLess16: 455 return rewriteValueARM64_OpLess16_0(v) 456 case OpLess16U: 457 return rewriteValueARM64_OpLess16U_0(v) 458 case OpLess32: 459 return rewriteValueARM64_OpLess32_0(v) 460 case OpLess32F: 461 return rewriteValueARM64_OpLess32F_0(v) 462 case OpLess32U: 463 return rewriteValueARM64_OpLess32U_0(v) 464 case OpLess64: 465 return rewriteValueARM64_OpLess64_0(v) 466 case OpLess64F: 467 return rewriteValueARM64_OpLess64F_0(v) 468 case OpLess64U: 469 return rewriteValueARM64_OpLess64U_0(v) 470 case OpLess8: 471 return rewriteValueARM64_OpLess8_0(v) 472 case OpLess8U: 473 return rewriteValueARM64_OpLess8U_0(v) 474 case OpLoad: 475 return rewriteValueARM64_OpLoad_0(v) 476 case OpLsh16x16: 477 return rewriteValueARM64_OpLsh16x16_0(v) 478 case OpLsh16x32: 479 return rewriteValueARM64_OpLsh16x32_0(v) 480 case OpLsh16x64: 481 return rewriteValueARM64_OpLsh16x64_0(v) 482 case OpLsh16x8: 483 return rewriteValueARM64_OpLsh16x8_0(v) 484 case OpLsh32x16: 485 return rewriteValueARM64_OpLsh32x16_0(v) 486 case OpLsh32x32: 487 return rewriteValueARM64_OpLsh32x32_0(v) 488 case OpLsh32x64: 489 return rewriteValueARM64_OpLsh32x64_0(v) 490 case OpLsh32x8: 491 return rewriteValueARM64_OpLsh32x8_0(v) 492 case OpLsh64x16: 493 return rewriteValueARM64_OpLsh64x16_0(v) 494 case OpLsh64x32: 495 return rewriteValueARM64_OpLsh64x32_0(v) 496 case OpLsh64x64: 497 return rewriteValueARM64_OpLsh64x64_0(v) 498 case OpLsh64x8: 499 return rewriteValueARM64_OpLsh64x8_0(v) 500 case OpLsh8x16: 501 return rewriteValueARM64_OpLsh8x16_0(v) 502 case OpLsh8x32: 503 return rewriteValueARM64_OpLsh8x32_0(v) 504 case OpLsh8x64: 505 return rewriteValueARM64_OpLsh8x64_0(v) 506 case OpLsh8x8: 507 return rewriteValueARM64_OpLsh8x8_0(v) 508 case OpMod16: 509 return rewriteValueARM64_OpMod16_0(v) 510 case OpMod16u: 511 return rewriteValueARM64_OpMod16u_0(v) 512 case OpMod32: 513 return rewriteValueARM64_OpMod32_0(v) 
514 case OpMod32u: 515 return rewriteValueARM64_OpMod32u_0(v) 516 case OpMod64: 517 return rewriteValueARM64_OpMod64_0(v) 518 case OpMod64u: 519 return rewriteValueARM64_OpMod64u_0(v) 520 case OpMod8: 521 return rewriteValueARM64_OpMod8_0(v) 522 case OpMod8u: 523 return rewriteValueARM64_OpMod8u_0(v) 524 case OpMove: 525 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 526 case OpMul16: 527 return rewriteValueARM64_OpMul16_0(v) 528 case OpMul32: 529 return rewriteValueARM64_OpMul32_0(v) 530 case OpMul32F: 531 return rewriteValueARM64_OpMul32F_0(v) 532 case OpMul64: 533 return rewriteValueARM64_OpMul64_0(v) 534 case OpMul64F: 535 return rewriteValueARM64_OpMul64F_0(v) 536 case OpMul8: 537 return rewriteValueARM64_OpMul8_0(v) 538 case OpNeg16: 539 return rewriteValueARM64_OpNeg16_0(v) 540 case OpNeg32: 541 return rewriteValueARM64_OpNeg32_0(v) 542 case OpNeg32F: 543 return rewriteValueARM64_OpNeg32F_0(v) 544 case OpNeg64: 545 return rewriteValueARM64_OpNeg64_0(v) 546 case OpNeg64F: 547 return rewriteValueARM64_OpNeg64F_0(v) 548 case OpNeg8: 549 return rewriteValueARM64_OpNeg8_0(v) 550 case OpNeq16: 551 return rewriteValueARM64_OpNeq16_0(v) 552 case OpNeq32: 553 return rewriteValueARM64_OpNeq32_0(v) 554 case OpNeq32F: 555 return rewriteValueARM64_OpNeq32F_0(v) 556 case OpNeq64: 557 return rewriteValueARM64_OpNeq64_0(v) 558 case OpNeq64F: 559 return rewriteValueARM64_OpNeq64F_0(v) 560 case OpNeq8: 561 return rewriteValueARM64_OpNeq8_0(v) 562 case OpNeqB: 563 return rewriteValueARM64_OpNeqB_0(v) 564 case OpNeqPtr: 565 return rewriteValueARM64_OpNeqPtr_0(v) 566 case OpNilCheck: 567 return rewriteValueARM64_OpNilCheck_0(v) 568 case OpNot: 569 return rewriteValueARM64_OpNot_0(v) 570 case OpOffPtr: 571 return rewriteValueARM64_OpOffPtr_0(v) 572 case OpOr16: 573 return rewriteValueARM64_OpOr16_0(v) 574 case OpOr32: 575 return rewriteValueARM64_OpOr32_0(v) 576 case OpOr64: 577 return rewriteValueARM64_OpOr64_0(v) 578 case OpOr8: 579 return 
rewriteValueARM64_OpOr8_0(v) 580 case OpOrB: 581 return rewriteValueARM64_OpOrB_0(v) 582 case OpRound32F: 583 return rewriteValueARM64_OpRound32F_0(v) 584 case OpRound64F: 585 return rewriteValueARM64_OpRound64F_0(v) 586 case OpRsh16Ux16: 587 return rewriteValueARM64_OpRsh16Ux16_0(v) 588 case OpRsh16Ux32: 589 return rewriteValueARM64_OpRsh16Ux32_0(v) 590 case OpRsh16Ux64: 591 return rewriteValueARM64_OpRsh16Ux64_0(v) 592 case OpRsh16Ux8: 593 return rewriteValueARM64_OpRsh16Ux8_0(v) 594 case OpRsh16x16: 595 return rewriteValueARM64_OpRsh16x16_0(v) 596 case OpRsh16x32: 597 return rewriteValueARM64_OpRsh16x32_0(v) 598 case OpRsh16x64: 599 return rewriteValueARM64_OpRsh16x64_0(v) 600 case OpRsh16x8: 601 return rewriteValueARM64_OpRsh16x8_0(v) 602 case OpRsh32Ux16: 603 return rewriteValueARM64_OpRsh32Ux16_0(v) 604 case OpRsh32Ux32: 605 return rewriteValueARM64_OpRsh32Ux32_0(v) 606 case OpRsh32Ux64: 607 return rewriteValueARM64_OpRsh32Ux64_0(v) 608 case OpRsh32Ux8: 609 return rewriteValueARM64_OpRsh32Ux8_0(v) 610 case OpRsh32x16: 611 return rewriteValueARM64_OpRsh32x16_0(v) 612 case OpRsh32x32: 613 return rewriteValueARM64_OpRsh32x32_0(v) 614 case OpRsh32x64: 615 return rewriteValueARM64_OpRsh32x64_0(v) 616 case OpRsh32x8: 617 return rewriteValueARM64_OpRsh32x8_0(v) 618 case OpRsh64Ux16: 619 return rewriteValueARM64_OpRsh64Ux16_0(v) 620 case OpRsh64Ux32: 621 return rewriteValueARM64_OpRsh64Ux32_0(v) 622 case OpRsh64Ux64: 623 return rewriteValueARM64_OpRsh64Ux64_0(v) 624 case OpRsh64Ux8: 625 return rewriteValueARM64_OpRsh64Ux8_0(v) 626 case OpRsh64x16: 627 return rewriteValueARM64_OpRsh64x16_0(v) 628 case OpRsh64x32: 629 return rewriteValueARM64_OpRsh64x32_0(v) 630 case OpRsh64x64: 631 return rewriteValueARM64_OpRsh64x64_0(v) 632 case OpRsh64x8: 633 return rewriteValueARM64_OpRsh64x8_0(v) 634 case OpRsh8Ux16: 635 return rewriteValueARM64_OpRsh8Ux16_0(v) 636 case OpRsh8Ux32: 637 return rewriteValueARM64_OpRsh8Ux32_0(v) 638 case OpRsh8Ux64: 639 return 
rewriteValueARM64_OpRsh8Ux64_0(v) 640 case OpRsh8Ux8: 641 return rewriteValueARM64_OpRsh8Ux8_0(v) 642 case OpRsh8x16: 643 return rewriteValueARM64_OpRsh8x16_0(v) 644 case OpRsh8x32: 645 return rewriteValueARM64_OpRsh8x32_0(v) 646 case OpRsh8x64: 647 return rewriteValueARM64_OpRsh8x64_0(v) 648 case OpRsh8x8: 649 return rewriteValueARM64_OpRsh8x8_0(v) 650 case OpSignExt16to32: 651 return rewriteValueARM64_OpSignExt16to32_0(v) 652 case OpSignExt16to64: 653 return rewriteValueARM64_OpSignExt16to64_0(v) 654 case OpSignExt32to64: 655 return rewriteValueARM64_OpSignExt32to64_0(v) 656 case OpSignExt8to16: 657 return rewriteValueARM64_OpSignExt8to16_0(v) 658 case OpSignExt8to32: 659 return rewriteValueARM64_OpSignExt8to32_0(v) 660 case OpSignExt8to64: 661 return rewriteValueARM64_OpSignExt8to64_0(v) 662 case OpSlicemask: 663 return rewriteValueARM64_OpSlicemask_0(v) 664 case OpSqrt: 665 return rewriteValueARM64_OpSqrt_0(v) 666 case OpStaticCall: 667 return rewriteValueARM64_OpStaticCall_0(v) 668 case OpStore: 669 return rewriteValueARM64_OpStore_0(v) 670 case OpSub16: 671 return rewriteValueARM64_OpSub16_0(v) 672 case OpSub32: 673 return rewriteValueARM64_OpSub32_0(v) 674 case OpSub32F: 675 return rewriteValueARM64_OpSub32F_0(v) 676 case OpSub64: 677 return rewriteValueARM64_OpSub64_0(v) 678 case OpSub64F: 679 return rewriteValueARM64_OpSub64F_0(v) 680 case OpSub8: 681 return rewriteValueARM64_OpSub8_0(v) 682 case OpSubPtr: 683 return rewriteValueARM64_OpSubPtr_0(v) 684 case OpTrunc16to8: 685 return rewriteValueARM64_OpTrunc16to8_0(v) 686 case OpTrunc32to16: 687 return rewriteValueARM64_OpTrunc32to16_0(v) 688 case OpTrunc32to8: 689 return rewriteValueARM64_OpTrunc32to8_0(v) 690 case OpTrunc64to16: 691 return rewriteValueARM64_OpTrunc64to16_0(v) 692 case OpTrunc64to32: 693 return rewriteValueARM64_OpTrunc64to32_0(v) 694 case OpTrunc64to8: 695 return rewriteValueARM64_OpTrunc64to8_0(v) 696 case OpXor16: 697 return rewriteValueARM64_OpXor16_0(v) 698 case OpXor32: 699 return 
rewriteValueARM64_OpXor32_0(v) 700 case OpXor64: 701 return rewriteValueARM64_OpXor64_0(v) 702 case OpXor8: 703 return rewriteValueARM64_OpXor8_0(v) 704 case OpZero: 705 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) 706 case OpZeroExt16to32: 707 return rewriteValueARM64_OpZeroExt16to32_0(v) 708 case OpZeroExt16to64: 709 return rewriteValueARM64_OpZeroExt16to64_0(v) 710 case OpZeroExt32to64: 711 return rewriteValueARM64_OpZeroExt32to64_0(v) 712 case OpZeroExt8to16: 713 return rewriteValueARM64_OpZeroExt8to16_0(v) 714 case OpZeroExt8to32: 715 return rewriteValueARM64_OpZeroExt8to32_0(v) 716 case OpZeroExt8to64: 717 return rewriteValueARM64_OpZeroExt8to64_0(v) 718 } 719 return false 720 } 721 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 722 // match: (ADD x (MOVDconst [c])) 723 // cond: 724 // result: (ADDconst [c] x) 725 for { 726 x := v.Args[0] 727 v_1 := v.Args[1] 728 if v_1.Op != OpARM64MOVDconst { 729 break 730 } 731 c := v_1.AuxInt 732 v.reset(OpARM64ADDconst) 733 v.AuxInt = c 734 v.AddArg(x) 735 return true 736 } 737 // match: (ADD (MOVDconst [c]) x) 738 // cond: 739 // result: (ADDconst [c] x) 740 for { 741 v_0 := v.Args[0] 742 if v_0.Op != OpARM64MOVDconst { 743 break 744 } 745 c := v_0.AuxInt 746 x := v.Args[1] 747 v.reset(OpARM64ADDconst) 748 v.AuxInt = c 749 v.AddArg(x) 750 return true 751 } 752 // match: (ADD x (NEG y)) 753 // cond: 754 // result: (SUB x y) 755 for { 756 x := v.Args[0] 757 v_1 := v.Args[1] 758 if v_1.Op != OpARM64NEG { 759 break 760 } 761 y := v_1.Args[0] 762 v.reset(OpARM64SUB) 763 v.AddArg(x) 764 v.AddArg(y) 765 return true 766 } 767 // match: (ADD (NEG y) x) 768 // cond: 769 // result: (SUB x y) 770 for { 771 v_0 := v.Args[0] 772 if v_0.Op != OpARM64NEG { 773 break 774 } 775 y := v_0.Args[0] 776 x := v.Args[1] 777 v.reset(OpARM64SUB) 778 v.AddArg(x) 779 v.AddArg(y) 780 return true 781 } 782 // match: (ADD x (SLLconst [c] y)) 783 // cond: 784 // result: (ADDshiftLL x y [c]) 785 for { 786 x := v.Args[0] 
787 v_1 := v.Args[1] 788 if v_1.Op != OpARM64SLLconst { 789 break 790 } 791 c := v_1.AuxInt 792 y := v_1.Args[0] 793 v.reset(OpARM64ADDshiftLL) 794 v.AuxInt = c 795 v.AddArg(x) 796 v.AddArg(y) 797 return true 798 } 799 // match: (ADD (SLLconst [c] y) x) 800 // cond: 801 // result: (ADDshiftLL x y [c]) 802 for { 803 v_0 := v.Args[0] 804 if v_0.Op != OpARM64SLLconst { 805 break 806 } 807 c := v_0.AuxInt 808 y := v_0.Args[0] 809 x := v.Args[1] 810 v.reset(OpARM64ADDshiftLL) 811 v.AuxInt = c 812 v.AddArg(x) 813 v.AddArg(y) 814 return true 815 } 816 // match: (ADD x (SRLconst [c] y)) 817 // cond: 818 // result: (ADDshiftRL x y [c]) 819 for { 820 x := v.Args[0] 821 v_1 := v.Args[1] 822 if v_1.Op != OpARM64SRLconst { 823 break 824 } 825 c := v_1.AuxInt 826 y := v_1.Args[0] 827 v.reset(OpARM64ADDshiftRL) 828 v.AuxInt = c 829 v.AddArg(x) 830 v.AddArg(y) 831 return true 832 } 833 // match: (ADD (SRLconst [c] y) x) 834 // cond: 835 // result: (ADDshiftRL x y [c]) 836 for { 837 v_0 := v.Args[0] 838 if v_0.Op != OpARM64SRLconst { 839 break 840 } 841 c := v_0.AuxInt 842 y := v_0.Args[0] 843 x := v.Args[1] 844 v.reset(OpARM64ADDshiftRL) 845 v.AuxInt = c 846 v.AddArg(x) 847 v.AddArg(y) 848 return true 849 } 850 // match: (ADD x (SRAconst [c] y)) 851 // cond: 852 // result: (ADDshiftRA x y [c]) 853 for { 854 x := v.Args[0] 855 v_1 := v.Args[1] 856 if v_1.Op != OpARM64SRAconst { 857 break 858 } 859 c := v_1.AuxInt 860 y := v_1.Args[0] 861 v.reset(OpARM64ADDshiftRA) 862 v.AuxInt = c 863 v.AddArg(x) 864 v.AddArg(y) 865 return true 866 } 867 // match: (ADD (SRAconst [c] y) x) 868 // cond: 869 // result: (ADDshiftRA x y [c]) 870 for { 871 v_0 := v.Args[0] 872 if v_0.Op != OpARM64SRAconst { 873 break 874 } 875 c := v_0.AuxInt 876 y := v_0.Args[0] 877 x := v.Args[1] 878 v.reset(OpARM64ADDshiftRA) 879 v.AuxInt = c 880 v.AddArg(x) 881 v.AddArg(y) 882 return true 883 } 884 return false 885 } 886 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 887 // match: (ADDconst [off1] 
(MOVDaddr [off2] {sym} ptr)) 888 // cond: 889 // result: (MOVDaddr [off1+off2] {sym} ptr) 890 for { 891 off1 := v.AuxInt 892 v_0 := v.Args[0] 893 if v_0.Op != OpARM64MOVDaddr { 894 break 895 } 896 off2 := v_0.AuxInt 897 sym := v_0.Aux 898 ptr := v_0.Args[0] 899 v.reset(OpARM64MOVDaddr) 900 v.AuxInt = off1 + off2 901 v.Aux = sym 902 v.AddArg(ptr) 903 return true 904 } 905 // match: (ADDconst [0] x) 906 // cond: 907 // result: x 908 for { 909 if v.AuxInt != 0 { 910 break 911 } 912 x := v.Args[0] 913 v.reset(OpCopy) 914 v.Type = x.Type 915 v.AddArg(x) 916 return true 917 } 918 // match: (ADDconst [c] (MOVDconst [d])) 919 // cond: 920 // result: (MOVDconst [c+d]) 921 for { 922 c := v.AuxInt 923 v_0 := v.Args[0] 924 if v_0.Op != OpARM64MOVDconst { 925 break 926 } 927 d := v_0.AuxInt 928 v.reset(OpARM64MOVDconst) 929 v.AuxInt = c + d 930 return true 931 } 932 // match: (ADDconst [c] (ADDconst [d] x)) 933 // cond: 934 // result: (ADDconst [c+d] x) 935 for { 936 c := v.AuxInt 937 v_0 := v.Args[0] 938 if v_0.Op != OpARM64ADDconst { 939 break 940 } 941 d := v_0.AuxInt 942 x := v_0.Args[0] 943 v.reset(OpARM64ADDconst) 944 v.AuxInt = c + d 945 v.AddArg(x) 946 return true 947 } 948 // match: (ADDconst [c] (SUBconst [d] x)) 949 // cond: 950 // result: (ADDconst [c-d] x) 951 for { 952 c := v.AuxInt 953 v_0 := v.Args[0] 954 if v_0.Op != OpARM64SUBconst { 955 break 956 } 957 d := v_0.AuxInt 958 x := v_0.Args[0] 959 v.reset(OpARM64ADDconst) 960 v.AuxInt = c - d 961 v.AddArg(x) 962 return true 963 } 964 return false 965 } 966 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 967 b := v.Block 968 _ = b 969 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 970 // cond: 971 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 972 for { 973 d := v.AuxInt 974 v_0 := v.Args[0] 975 if v_0.Op != OpARM64MOVDconst { 976 break 977 } 978 c := v_0.AuxInt 979 x := v.Args[1] 980 v.reset(OpARM64ADDconst) 981 v.AuxInt = c 982 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 983 v0.AuxInt = d 984 
v0.AddArg(x) 985 v.AddArg(v0) 986 return true 987 } 988 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 989 // cond: 990 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 991 for { 992 d := v.AuxInt 993 x := v.Args[0] 994 v_1 := v.Args[1] 995 if v_1.Op != OpARM64MOVDconst { 996 break 997 } 998 c := v_1.AuxInt 999 v.reset(OpARM64ADDconst) 1000 v.AuxInt = int64(uint64(c) << uint64(d)) 1001 v.AddArg(x) 1002 return true 1003 } 1004 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 1005 // cond: 1006 // result: (RORconst [64-c] x) 1007 for { 1008 c := v.AuxInt 1009 v_0 := v.Args[0] 1010 if v_0.Op != OpARM64SRLconst { 1011 break 1012 } 1013 if v_0.AuxInt != 64-c { 1014 break 1015 } 1016 x := v_0.Args[0] 1017 if x != v.Args[1] { 1018 break 1019 } 1020 v.reset(OpARM64RORconst) 1021 v.AuxInt = 64 - c 1022 v.AddArg(x) 1023 return true 1024 } 1025 // match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 1026 // cond: c < 32 && t.Size() == 4 1027 // result: (RORWconst [32-c] x) 1028 for { 1029 t := v.Type 1030 c := v.AuxInt 1031 v_0 := v.Args[0] 1032 if v_0.Op != OpARM64SRLconst { 1033 break 1034 } 1035 if v_0.AuxInt != 32-c { 1036 break 1037 } 1038 v_0_0 := v_0.Args[0] 1039 if v_0_0.Op != OpARM64MOVWUreg { 1040 break 1041 } 1042 x := v_0_0.Args[0] 1043 if x != v.Args[1] { 1044 break 1045 } 1046 if !(c < 32 && t.Size() == 4) { 1047 break 1048 } 1049 v.reset(OpARM64RORWconst) 1050 v.AuxInt = 32 - c 1051 v.AddArg(x) 1052 return true 1053 } 1054 return false 1055 } 1056 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 1057 b := v.Block 1058 _ = b 1059 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1060 // cond: 1061 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1062 for { 1063 d := v.AuxInt 1064 v_0 := v.Args[0] 1065 if v_0.Op != OpARM64MOVDconst { 1066 break 1067 } 1068 c := v_0.AuxInt 1069 x := v.Args[1] 1070 v.reset(OpARM64ADDconst) 1071 v.AuxInt = c 1072 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1073 v0.AuxInt = d 1074 v0.AddArg(x) 1075 
v.AddArg(v0) 1076 return true 1077 } 1078 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1079 // cond: 1080 // result: (ADDconst x [int64(int64(c)>>uint64(d))]) 1081 for { 1082 d := v.AuxInt 1083 x := v.Args[0] 1084 v_1 := v.Args[1] 1085 if v_1.Op != OpARM64MOVDconst { 1086 break 1087 } 1088 c := v_1.AuxInt 1089 v.reset(OpARM64ADDconst) 1090 v.AuxInt = int64(int64(c) >> uint64(d)) 1091 v.AddArg(x) 1092 return true 1093 } 1094 return false 1095 } 1096 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 1097 b := v.Block 1098 _ = b 1099 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1100 // cond: 1101 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1102 for { 1103 d := v.AuxInt 1104 v_0 := v.Args[0] 1105 if v_0.Op != OpARM64MOVDconst { 1106 break 1107 } 1108 c := v_0.AuxInt 1109 x := v.Args[1] 1110 v.reset(OpARM64ADDconst) 1111 v.AuxInt = c 1112 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1113 v0.AuxInt = d 1114 v0.AddArg(x) 1115 v.AddArg(v0) 1116 return true 1117 } 1118 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1119 // cond: 1120 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1121 for { 1122 d := v.AuxInt 1123 x := v.Args[0] 1124 v_1 := v.Args[1] 1125 if v_1.Op != OpARM64MOVDconst { 1126 break 1127 } 1128 c := v_1.AuxInt 1129 v.reset(OpARM64ADDconst) 1130 v.AuxInt = int64(uint64(c) >> uint64(d)) 1131 v.AddArg(x) 1132 return true 1133 } 1134 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 1135 // cond: 1136 // result: (RORconst [ c] x) 1137 for { 1138 c := v.AuxInt 1139 v_0 := v.Args[0] 1140 if v_0.Op != OpARM64SLLconst { 1141 break 1142 } 1143 if v_0.AuxInt != 64-c { 1144 break 1145 } 1146 x := v_0.Args[0] 1147 if x != v.Args[1] { 1148 break 1149 } 1150 v.reset(OpARM64RORconst) 1151 v.AuxInt = c 1152 v.AddArg(x) 1153 return true 1154 } 1155 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 1156 // cond: c < 32 && t.Size() == 4 1157 // result: (RORWconst [ c] x) 1158 for { 1159 t := v.Type 1160 c := v.AuxInt 1161 v_0 := 
v.Args[0] 1162 if v_0.Op != OpARM64SLLconst { 1163 break 1164 } 1165 if v_0.AuxInt != 32-c { 1166 break 1167 } 1168 x := v_0.Args[0] 1169 v_1 := v.Args[1] 1170 if v_1.Op != OpARM64MOVWUreg { 1171 break 1172 } 1173 if x != v_1.Args[0] { 1174 break 1175 } 1176 if !(c < 32 && t.Size() == 4) { 1177 break 1178 } 1179 v.reset(OpARM64RORWconst) 1180 v.AuxInt = c 1181 v.AddArg(x) 1182 return true 1183 } 1184 return false 1185 } 1186 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 1187 // match: (AND x (MOVDconst [c])) 1188 // cond: 1189 // result: (ANDconst [c] x) 1190 for { 1191 x := v.Args[0] 1192 v_1 := v.Args[1] 1193 if v_1.Op != OpARM64MOVDconst { 1194 break 1195 } 1196 c := v_1.AuxInt 1197 v.reset(OpARM64ANDconst) 1198 v.AuxInt = c 1199 v.AddArg(x) 1200 return true 1201 } 1202 // match: (AND (MOVDconst [c]) x) 1203 // cond: 1204 // result: (ANDconst [c] x) 1205 for { 1206 v_0 := v.Args[0] 1207 if v_0.Op != OpARM64MOVDconst { 1208 break 1209 } 1210 c := v_0.AuxInt 1211 x := v.Args[1] 1212 v.reset(OpARM64ANDconst) 1213 v.AuxInt = c 1214 v.AddArg(x) 1215 return true 1216 } 1217 // match: (AND x x) 1218 // cond: 1219 // result: x 1220 for { 1221 x := v.Args[0] 1222 if x != v.Args[1] { 1223 break 1224 } 1225 v.reset(OpCopy) 1226 v.Type = x.Type 1227 v.AddArg(x) 1228 return true 1229 } 1230 // match: (AND x (MVN y)) 1231 // cond: 1232 // result: (BIC x y) 1233 for { 1234 x := v.Args[0] 1235 v_1 := v.Args[1] 1236 if v_1.Op != OpARM64MVN { 1237 break 1238 } 1239 y := v_1.Args[0] 1240 v.reset(OpARM64BIC) 1241 v.AddArg(x) 1242 v.AddArg(y) 1243 return true 1244 } 1245 // match: (AND (MVN y) x) 1246 // cond: 1247 // result: (BIC x y) 1248 for { 1249 v_0 := v.Args[0] 1250 if v_0.Op != OpARM64MVN { 1251 break 1252 } 1253 y := v_0.Args[0] 1254 x := v.Args[1] 1255 v.reset(OpARM64BIC) 1256 v.AddArg(x) 1257 v.AddArg(y) 1258 return true 1259 } 1260 // match: (AND x (SLLconst [c] y)) 1261 // cond: 1262 // result: (ANDshiftLL x y [c]) 1263 for { 1264 x := v.Args[0] 1265 v_1 := 
v.Args[1] 1266 if v_1.Op != OpARM64SLLconst { 1267 break 1268 } 1269 c := v_1.AuxInt 1270 y := v_1.Args[0] 1271 v.reset(OpARM64ANDshiftLL) 1272 v.AuxInt = c 1273 v.AddArg(x) 1274 v.AddArg(y) 1275 return true 1276 } 1277 // match: (AND (SLLconst [c] y) x) 1278 // cond: 1279 // result: (ANDshiftLL x y [c]) 1280 for { 1281 v_0 := v.Args[0] 1282 if v_0.Op != OpARM64SLLconst { 1283 break 1284 } 1285 c := v_0.AuxInt 1286 y := v_0.Args[0] 1287 x := v.Args[1] 1288 v.reset(OpARM64ANDshiftLL) 1289 v.AuxInt = c 1290 v.AddArg(x) 1291 v.AddArg(y) 1292 return true 1293 } 1294 // match: (AND x (SRLconst [c] y)) 1295 // cond: 1296 // result: (ANDshiftRL x y [c]) 1297 for { 1298 x := v.Args[0] 1299 v_1 := v.Args[1] 1300 if v_1.Op != OpARM64SRLconst { 1301 break 1302 } 1303 c := v_1.AuxInt 1304 y := v_1.Args[0] 1305 v.reset(OpARM64ANDshiftRL) 1306 v.AuxInt = c 1307 v.AddArg(x) 1308 v.AddArg(y) 1309 return true 1310 } 1311 // match: (AND (SRLconst [c] y) x) 1312 // cond: 1313 // result: (ANDshiftRL x y [c]) 1314 for { 1315 v_0 := v.Args[0] 1316 if v_0.Op != OpARM64SRLconst { 1317 break 1318 } 1319 c := v_0.AuxInt 1320 y := v_0.Args[0] 1321 x := v.Args[1] 1322 v.reset(OpARM64ANDshiftRL) 1323 v.AuxInt = c 1324 v.AddArg(x) 1325 v.AddArg(y) 1326 return true 1327 } 1328 // match: (AND x (SRAconst [c] y)) 1329 // cond: 1330 // result: (ANDshiftRA x y [c]) 1331 for { 1332 x := v.Args[0] 1333 v_1 := v.Args[1] 1334 if v_1.Op != OpARM64SRAconst { 1335 break 1336 } 1337 c := v_1.AuxInt 1338 y := v_1.Args[0] 1339 v.reset(OpARM64ANDshiftRA) 1340 v.AuxInt = c 1341 v.AddArg(x) 1342 v.AddArg(y) 1343 return true 1344 } 1345 return false 1346 } 1347 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 1348 // match: (AND (SRAconst [c] y) x) 1349 // cond: 1350 // result: (ANDshiftRA x y [c]) 1351 for { 1352 v_0 := v.Args[0] 1353 if v_0.Op != OpARM64SRAconst { 1354 break 1355 } 1356 c := v_0.AuxInt 1357 y := v_0.Args[0] 1358 x := v.Args[1] 1359 v.reset(OpARM64ANDshiftRA) 1360 v.AuxInt = c 1361 
v.AddArg(x) 1362 v.AddArg(y) 1363 return true 1364 } 1365 return false 1366 } 1367 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 1368 // match: (ANDconst [0] _) 1369 // cond: 1370 // result: (MOVDconst [0]) 1371 for { 1372 if v.AuxInt != 0 { 1373 break 1374 } 1375 v.reset(OpARM64MOVDconst) 1376 v.AuxInt = 0 1377 return true 1378 } 1379 // match: (ANDconst [-1] x) 1380 // cond: 1381 // result: x 1382 for { 1383 if v.AuxInt != -1 { 1384 break 1385 } 1386 x := v.Args[0] 1387 v.reset(OpCopy) 1388 v.Type = x.Type 1389 v.AddArg(x) 1390 return true 1391 } 1392 // match: (ANDconst [c] (MOVDconst [d])) 1393 // cond: 1394 // result: (MOVDconst [c&d]) 1395 for { 1396 c := v.AuxInt 1397 v_0 := v.Args[0] 1398 if v_0.Op != OpARM64MOVDconst { 1399 break 1400 } 1401 d := v_0.AuxInt 1402 v.reset(OpARM64MOVDconst) 1403 v.AuxInt = c & d 1404 return true 1405 } 1406 // match: (ANDconst [c] (ANDconst [d] x)) 1407 // cond: 1408 // result: (ANDconst [c&d] x) 1409 for { 1410 c := v.AuxInt 1411 v_0 := v.Args[0] 1412 if v_0.Op != OpARM64ANDconst { 1413 break 1414 } 1415 d := v_0.AuxInt 1416 x := v_0.Args[0] 1417 v.reset(OpARM64ANDconst) 1418 v.AuxInt = c & d 1419 v.AddArg(x) 1420 return true 1421 } 1422 return false 1423 } 1424 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 1425 b := v.Block 1426 _ = b 1427 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 1428 // cond: 1429 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 1430 for { 1431 d := v.AuxInt 1432 v_0 := v.Args[0] 1433 if v_0.Op != OpARM64MOVDconst { 1434 break 1435 } 1436 c := v_0.AuxInt 1437 x := v.Args[1] 1438 v.reset(OpARM64ANDconst) 1439 v.AuxInt = c 1440 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1441 v0.AuxInt = d 1442 v0.AddArg(x) 1443 v.AddArg(v0) 1444 return true 1445 } 1446 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 1447 // cond: 1448 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 1449 for { 1450 d := v.AuxInt 1451 x := v.Args[0] 1452 v_1 := v.Args[1] 1453 if v_1.Op != 
OpARM64MOVDconst { 1454 break 1455 } 1456 c := v_1.AuxInt 1457 v.reset(OpARM64ANDconst) 1458 v.AuxInt = int64(uint64(c) << uint64(d)) 1459 v.AddArg(x) 1460 return true 1461 } 1462 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 1463 // cond: c==d 1464 // result: y 1465 for { 1466 d := v.AuxInt 1467 x := v.Args[0] 1468 y := v.Args[1] 1469 if y.Op != OpARM64SLLconst { 1470 break 1471 } 1472 c := y.AuxInt 1473 if x != y.Args[0] { 1474 break 1475 } 1476 if !(c == d) { 1477 break 1478 } 1479 v.reset(OpCopy) 1480 v.Type = y.Type 1481 v.AddArg(y) 1482 return true 1483 } 1484 return false 1485 } 1486 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool { 1487 b := v.Block 1488 _ = b 1489 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 1490 // cond: 1491 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 1492 for { 1493 d := v.AuxInt 1494 v_0 := v.Args[0] 1495 if v_0.Op != OpARM64MOVDconst { 1496 break 1497 } 1498 c := v_0.AuxInt 1499 x := v.Args[1] 1500 v.reset(OpARM64ANDconst) 1501 v.AuxInt = c 1502 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1503 v0.AuxInt = d 1504 v0.AddArg(x) 1505 v.AddArg(v0) 1506 return true 1507 } 1508 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 1509 // cond: 1510 // result: (ANDconst x [int64(int64(c)>>uint64(d))]) 1511 for { 1512 d := v.AuxInt 1513 x := v.Args[0] 1514 v_1 := v.Args[1] 1515 if v_1.Op != OpARM64MOVDconst { 1516 break 1517 } 1518 c := v_1.AuxInt 1519 v.reset(OpARM64ANDconst) 1520 v.AuxInt = int64(int64(c) >> uint64(d)) 1521 v.AddArg(x) 1522 return true 1523 } 1524 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 1525 // cond: c==d 1526 // result: y 1527 for { 1528 d := v.AuxInt 1529 x := v.Args[0] 1530 y := v.Args[1] 1531 if y.Op != OpARM64SRAconst { 1532 break 1533 } 1534 c := y.AuxInt 1535 if x != y.Args[0] { 1536 break 1537 } 1538 if !(c == d) { 1539 break 1540 } 1541 v.reset(OpCopy) 1542 v.Type = y.Type 1543 v.AddArg(y) 1544 return true 1545 } 1546 return false 1547 } 1548 func rewriteValueARM64_OpARM64ANDshiftRL_0(v 
*Value) bool { 1549 b := v.Block 1550 _ = b 1551 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 1552 // cond: 1553 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 1554 for { 1555 d := v.AuxInt 1556 v_0 := v.Args[0] 1557 if v_0.Op != OpARM64MOVDconst { 1558 break 1559 } 1560 c := v_0.AuxInt 1561 x := v.Args[1] 1562 v.reset(OpARM64ANDconst) 1563 v.AuxInt = c 1564 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1565 v0.AuxInt = d 1566 v0.AddArg(x) 1567 v.AddArg(v0) 1568 return true 1569 } 1570 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 1571 // cond: 1572 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 1573 for { 1574 d := v.AuxInt 1575 x := v.Args[0] 1576 v_1 := v.Args[1] 1577 if v_1.Op != OpARM64MOVDconst { 1578 break 1579 } 1580 c := v_1.AuxInt 1581 v.reset(OpARM64ANDconst) 1582 v.AuxInt = int64(uint64(c) >> uint64(d)) 1583 v.AddArg(x) 1584 return true 1585 } 1586 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 1587 // cond: c==d 1588 // result: y 1589 for { 1590 d := v.AuxInt 1591 x := v.Args[0] 1592 y := v.Args[1] 1593 if y.Op != OpARM64SRLconst { 1594 break 1595 } 1596 c := y.AuxInt 1597 if x != y.Args[0] { 1598 break 1599 } 1600 if !(c == d) { 1601 break 1602 } 1603 v.reset(OpCopy) 1604 v.Type = y.Type 1605 v.AddArg(y) 1606 return true 1607 } 1608 return false 1609 } 1610 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 1611 // match: (BIC x (MOVDconst [c])) 1612 // cond: 1613 // result: (BICconst [c] x) 1614 for { 1615 x := v.Args[0] 1616 v_1 := v.Args[1] 1617 if v_1.Op != OpARM64MOVDconst { 1618 break 1619 } 1620 c := v_1.AuxInt 1621 v.reset(OpARM64BICconst) 1622 v.AuxInt = c 1623 v.AddArg(x) 1624 return true 1625 } 1626 // match: (BIC x x) 1627 // cond: 1628 // result: (MOVDconst [0]) 1629 for { 1630 x := v.Args[0] 1631 if x != v.Args[1] { 1632 break 1633 } 1634 v.reset(OpARM64MOVDconst) 1635 v.AuxInt = 0 1636 return true 1637 } 1638 // match: (BIC x (SLLconst [c] y)) 1639 // cond: 1640 // result: (BICshiftLL x y [c]) 1641 for { 1642 x := 
v.Args[0] 1643 v_1 := v.Args[1] 1644 if v_1.Op != OpARM64SLLconst { 1645 break 1646 } 1647 c := v_1.AuxInt 1648 y := v_1.Args[0] 1649 v.reset(OpARM64BICshiftLL) 1650 v.AuxInt = c 1651 v.AddArg(x) 1652 v.AddArg(y) 1653 return true 1654 } 1655 // match: (BIC x (SRLconst [c] y)) 1656 // cond: 1657 // result: (BICshiftRL x y [c]) 1658 for { 1659 x := v.Args[0] 1660 v_1 := v.Args[1] 1661 if v_1.Op != OpARM64SRLconst { 1662 break 1663 } 1664 c := v_1.AuxInt 1665 y := v_1.Args[0] 1666 v.reset(OpARM64BICshiftRL) 1667 v.AuxInt = c 1668 v.AddArg(x) 1669 v.AddArg(y) 1670 return true 1671 } 1672 // match: (BIC x (SRAconst [c] y)) 1673 // cond: 1674 // result: (BICshiftRA x y [c]) 1675 for { 1676 x := v.Args[0] 1677 v_1 := v.Args[1] 1678 if v_1.Op != OpARM64SRAconst { 1679 break 1680 } 1681 c := v_1.AuxInt 1682 y := v_1.Args[0] 1683 v.reset(OpARM64BICshiftRA) 1684 v.AuxInt = c 1685 v.AddArg(x) 1686 v.AddArg(y) 1687 return true 1688 } 1689 return false 1690 } 1691 func rewriteValueARM64_OpARM64BICconst_0(v *Value) bool { 1692 // match: (BICconst [0] x) 1693 // cond: 1694 // result: x 1695 for { 1696 if v.AuxInt != 0 { 1697 break 1698 } 1699 x := v.Args[0] 1700 v.reset(OpCopy) 1701 v.Type = x.Type 1702 v.AddArg(x) 1703 return true 1704 } 1705 // match: (BICconst [-1] _) 1706 // cond: 1707 // result: (MOVDconst [0]) 1708 for { 1709 if v.AuxInt != -1 { 1710 break 1711 } 1712 v.reset(OpARM64MOVDconst) 1713 v.AuxInt = 0 1714 return true 1715 } 1716 // match: (BICconst [c] (MOVDconst [d])) 1717 // cond: 1718 // result: (MOVDconst [d&^c]) 1719 for { 1720 c := v.AuxInt 1721 v_0 := v.Args[0] 1722 if v_0.Op != OpARM64MOVDconst { 1723 break 1724 } 1725 d := v_0.AuxInt 1726 v.reset(OpARM64MOVDconst) 1727 v.AuxInt = d &^ c 1728 return true 1729 } 1730 return false 1731 } 1732 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 1733 // match: (BICshiftLL x (MOVDconst [c]) [d]) 1734 // cond: 1735 // result: (BICconst x [int64(uint64(c)<<uint64(d))]) 1736 for { 1737 d := v.AuxInt 1738 x 
:= v.Args[0] 1739 v_1 := v.Args[1] 1740 if v_1.Op != OpARM64MOVDconst { 1741 break 1742 } 1743 c := v_1.AuxInt 1744 v.reset(OpARM64BICconst) 1745 v.AuxInt = int64(uint64(c) << uint64(d)) 1746 v.AddArg(x) 1747 return true 1748 } 1749 // match: (BICshiftLL x (SLLconst x [c]) [d]) 1750 // cond: c==d 1751 // result: (MOVDconst [0]) 1752 for { 1753 d := v.AuxInt 1754 x := v.Args[0] 1755 v_1 := v.Args[1] 1756 if v_1.Op != OpARM64SLLconst { 1757 break 1758 } 1759 c := v_1.AuxInt 1760 if x != v_1.Args[0] { 1761 break 1762 } 1763 if !(c == d) { 1764 break 1765 } 1766 v.reset(OpARM64MOVDconst) 1767 v.AuxInt = 0 1768 return true 1769 } 1770 return false 1771 } 1772 func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool { 1773 // match: (BICshiftRA x (MOVDconst [c]) [d]) 1774 // cond: 1775 // result: (BICconst x [int64(int64(c)>>uint64(d))]) 1776 for { 1777 d := v.AuxInt 1778 x := v.Args[0] 1779 v_1 := v.Args[1] 1780 if v_1.Op != OpARM64MOVDconst { 1781 break 1782 } 1783 c := v_1.AuxInt 1784 v.reset(OpARM64BICconst) 1785 v.AuxInt = int64(int64(c) >> uint64(d)) 1786 v.AddArg(x) 1787 return true 1788 } 1789 // match: (BICshiftRA x (SRAconst x [c]) [d]) 1790 // cond: c==d 1791 // result: (MOVDconst [0]) 1792 for { 1793 d := v.AuxInt 1794 x := v.Args[0] 1795 v_1 := v.Args[1] 1796 if v_1.Op != OpARM64SRAconst { 1797 break 1798 } 1799 c := v_1.AuxInt 1800 if x != v_1.Args[0] { 1801 break 1802 } 1803 if !(c == d) { 1804 break 1805 } 1806 v.reset(OpARM64MOVDconst) 1807 v.AuxInt = 0 1808 return true 1809 } 1810 return false 1811 } 1812 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool { 1813 // match: (BICshiftRL x (MOVDconst [c]) [d]) 1814 // cond: 1815 // result: (BICconst x [int64(uint64(c)>>uint64(d))]) 1816 for { 1817 d := v.AuxInt 1818 x := v.Args[0] 1819 v_1 := v.Args[1] 1820 if v_1.Op != OpARM64MOVDconst { 1821 break 1822 } 1823 c := v_1.AuxInt 1824 v.reset(OpARM64BICconst) 1825 v.AuxInt = int64(uint64(c) >> uint64(d)) 1826 v.AddArg(x) 1827 return true 1828 } 1829 // 
match: (BICshiftRL x (SRLconst x [c]) [d]) 1830 // cond: c==d 1831 // result: (MOVDconst [0]) 1832 for { 1833 d := v.AuxInt 1834 x := v.Args[0] 1835 v_1 := v.Args[1] 1836 if v_1.Op != OpARM64SRLconst { 1837 break 1838 } 1839 c := v_1.AuxInt 1840 if x != v_1.Args[0] { 1841 break 1842 } 1843 if !(c == d) { 1844 break 1845 } 1846 v.reset(OpARM64MOVDconst) 1847 v.AuxInt = 0 1848 return true 1849 } 1850 return false 1851 } 1852 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 1853 b := v.Block 1854 _ = b 1855 // match: (CMP x (MOVDconst [c])) 1856 // cond: 1857 // result: (CMPconst [c] x) 1858 for { 1859 x := v.Args[0] 1860 v_1 := v.Args[1] 1861 if v_1.Op != OpARM64MOVDconst { 1862 break 1863 } 1864 c := v_1.AuxInt 1865 v.reset(OpARM64CMPconst) 1866 v.AuxInt = c 1867 v.AddArg(x) 1868 return true 1869 } 1870 // match: (CMP (MOVDconst [c]) x) 1871 // cond: 1872 // result: (InvertFlags (CMPconst [c] x)) 1873 for { 1874 v_0 := v.Args[0] 1875 if v_0.Op != OpARM64MOVDconst { 1876 break 1877 } 1878 c := v_0.AuxInt 1879 x := v.Args[1] 1880 v.reset(OpARM64InvertFlags) 1881 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 1882 v0.AuxInt = c 1883 v0.AddArg(x) 1884 v.AddArg(v0) 1885 return true 1886 } 1887 // match: (CMP x (SLLconst [c] y)) 1888 // cond: 1889 // result: (CMPshiftLL x y [c]) 1890 for { 1891 x := v.Args[0] 1892 v_1 := v.Args[1] 1893 if v_1.Op != OpARM64SLLconst { 1894 break 1895 } 1896 c := v_1.AuxInt 1897 y := v_1.Args[0] 1898 v.reset(OpARM64CMPshiftLL) 1899 v.AuxInt = c 1900 v.AddArg(x) 1901 v.AddArg(y) 1902 return true 1903 } 1904 // match: (CMP (SLLconst [c] y) x) 1905 // cond: 1906 // result: (InvertFlags (CMPshiftLL x y [c])) 1907 for { 1908 v_0 := v.Args[0] 1909 if v_0.Op != OpARM64SLLconst { 1910 break 1911 } 1912 c := v_0.AuxInt 1913 y := v_0.Args[0] 1914 x := v.Args[1] 1915 v.reset(OpARM64InvertFlags) 1916 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, TypeFlags) 1917 v0.AuxInt = c 1918 v0.AddArg(x) 1919 v0.AddArg(y) 1920 v.AddArg(v0) 1921 return true 
1922 } 1923 // match: (CMP x (SRLconst [c] y)) 1924 // cond: 1925 // result: (CMPshiftRL x y [c]) 1926 for { 1927 x := v.Args[0] 1928 v_1 := v.Args[1] 1929 if v_1.Op != OpARM64SRLconst { 1930 break 1931 } 1932 c := v_1.AuxInt 1933 y := v_1.Args[0] 1934 v.reset(OpARM64CMPshiftRL) 1935 v.AuxInt = c 1936 v.AddArg(x) 1937 v.AddArg(y) 1938 return true 1939 } 1940 // match: (CMP (SRLconst [c] y) x) 1941 // cond: 1942 // result: (InvertFlags (CMPshiftRL x y [c])) 1943 for { 1944 v_0 := v.Args[0] 1945 if v_0.Op != OpARM64SRLconst { 1946 break 1947 } 1948 c := v_0.AuxInt 1949 y := v_0.Args[0] 1950 x := v.Args[1] 1951 v.reset(OpARM64InvertFlags) 1952 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, TypeFlags) 1953 v0.AuxInt = c 1954 v0.AddArg(x) 1955 v0.AddArg(y) 1956 v.AddArg(v0) 1957 return true 1958 } 1959 // match: (CMP x (SRAconst [c] y)) 1960 // cond: 1961 // result: (CMPshiftRA x y [c]) 1962 for { 1963 x := v.Args[0] 1964 v_1 := v.Args[1] 1965 if v_1.Op != OpARM64SRAconst { 1966 break 1967 } 1968 c := v_1.AuxInt 1969 y := v_1.Args[0] 1970 v.reset(OpARM64CMPshiftRA) 1971 v.AuxInt = c 1972 v.AddArg(x) 1973 v.AddArg(y) 1974 return true 1975 } 1976 // match: (CMP (SRAconst [c] y) x) 1977 // cond: 1978 // result: (InvertFlags (CMPshiftRA x y [c])) 1979 for { 1980 v_0 := v.Args[0] 1981 if v_0.Op != OpARM64SRAconst { 1982 break 1983 } 1984 c := v_0.AuxInt 1985 y := v_0.Args[0] 1986 x := v.Args[1] 1987 v.reset(OpARM64InvertFlags) 1988 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, TypeFlags) 1989 v0.AuxInt = c 1990 v0.AddArg(x) 1991 v0.AddArg(y) 1992 v.AddArg(v0) 1993 return true 1994 } 1995 return false 1996 } 1997 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool { 1998 b := v.Block 1999 _ = b 2000 // match: (CMPW x (MOVDconst [c])) 2001 // cond: 2002 // result: (CMPWconst [int64(int32(c))] x) 2003 for { 2004 x := v.Args[0] 2005 v_1 := v.Args[1] 2006 if v_1.Op != OpARM64MOVDconst { 2007 break 2008 } 2009 c := v_1.AuxInt 2010 v.reset(OpARM64CMPWconst) 2011 v.AuxInt = 
int64(int32(c)) 2012 v.AddArg(x) 2013 return true 2014 } 2015 // match: (CMPW (MOVDconst [c]) x) 2016 // cond: 2017 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 2018 for { 2019 v_0 := v.Args[0] 2020 if v_0.Op != OpARM64MOVDconst { 2021 break 2022 } 2023 c := v_0.AuxInt 2024 x := v.Args[1] 2025 v.reset(OpARM64InvertFlags) 2026 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, TypeFlags) 2027 v0.AuxInt = int64(int32(c)) 2028 v0.AddArg(x) 2029 v.AddArg(v0) 2030 return true 2031 } 2032 return false 2033 } 2034 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool { 2035 // match: (CMPWconst (MOVDconst [x]) [y]) 2036 // cond: int32(x)==int32(y) 2037 // result: (FlagEQ) 2038 for { 2039 y := v.AuxInt 2040 v_0 := v.Args[0] 2041 if v_0.Op != OpARM64MOVDconst { 2042 break 2043 } 2044 x := v_0.AuxInt 2045 if !(int32(x) == int32(y)) { 2046 break 2047 } 2048 v.reset(OpARM64FlagEQ) 2049 return true 2050 } 2051 // match: (CMPWconst (MOVDconst [x]) [y]) 2052 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 2053 // result: (FlagLT_ULT) 2054 for { 2055 y := v.AuxInt 2056 v_0 := v.Args[0] 2057 if v_0.Op != OpARM64MOVDconst { 2058 break 2059 } 2060 x := v_0.AuxInt 2061 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 2062 break 2063 } 2064 v.reset(OpARM64FlagLT_ULT) 2065 return true 2066 } 2067 // match: (CMPWconst (MOVDconst [x]) [y]) 2068 // cond: int32(x)<int32(y) && uint32(x)>uint32(y) 2069 // result: (FlagLT_UGT) 2070 for { 2071 y := v.AuxInt 2072 v_0 := v.Args[0] 2073 if v_0.Op != OpARM64MOVDconst { 2074 break 2075 } 2076 x := v_0.AuxInt 2077 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 2078 break 2079 } 2080 v.reset(OpARM64FlagLT_UGT) 2081 return true 2082 } 2083 // match: (CMPWconst (MOVDconst [x]) [y]) 2084 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 2085 // result: (FlagGT_ULT) 2086 for { 2087 y := v.AuxInt 2088 v_0 := v.Args[0] 2089 if v_0.Op != OpARM64MOVDconst { 2090 break 2091 } 2092 x := v_0.AuxInt 2093 if !(int32(x) > int32(y) && uint32(x) < 
uint32(y)) { 2094 break 2095 } 2096 v.reset(OpARM64FlagGT_ULT) 2097 return true 2098 } 2099 // match: (CMPWconst (MOVDconst [x]) [y]) 2100 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 2101 // result: (FlagGT_UGT) 2102 for { 2103 y := v.AuxInt 2104 v_0 := v.Args[0] 2105 if v_0.Op != OpARM64MOVDconst { 2106 break 2107 } 2108 x := v_0.AuxInt 2109 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { 2110 break 2111 } 2112 v.reset(OpARM64FlagGT_UGT) 2113 return true 2114 } 2115 // match: (CMPWconst (MOVBUreg _) [c]) 2116 // cond: 0xff < int32(c) 2117 // result: (FlagLT_ULT) 2118 for { 2119 c := v.AuxInt 2120 v_0 := v.Args[0] 2121 if v_0.Op != OpARM64MOVBUreg { 2122 break 2123 } 2124 if !(0xff < int32(c)) { 2125 break 2126 } 2127 v.reset(OpARM64FlagLT_ULT) 2128 return true 2129 } 2130 // match: (CMPWconst (MOVHUreg _) [c]) 2131 // cond: 0xffff < int32(c) 2132 // result: (FlagLT_ULT) 2133 for { 2134 c := v.AuxInt 2135 v_0 := v.Args[0] 2136 if v_0.Op != OpARM64MOVHUreg { 2137 break 2138 } 2139 if !(0xffff < int32(c)) { 2140 break 2141 } 2142 v.reset(OpARM64FlagLT_ULT) 2143 return true 2144 } 2145 return false 2146 } 2147 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool { 2148 // match: (CMPconst (MOVDconst [x]) [y]) 2149 // cond: x==y 2150 // result: (FlagEQ) 2151 for { 2152 y := v.AuxInt 2153 v_0 := v.Args[0] 2154 if v_0.Op != OpARM64MOVDconst { 2155 break 2156 } 2157 x := v_0.AuxInt 2158 if !(x == y) { 2159 break 2160 } 2161 v.reset(OpARM64FlagEQ) 2162 return true 2163 } 2164 // match: (CMPconst (MOVDconst [x]) [y]) 2165 // cond: int64(x)<int64(y) && uint64(x)<uint64(y) 2166 // result: (FlagLT_ULT) 2167 for { 2168 y := v.AuxInt 2169 v_0 := v.Args[0] 2170 if v_0.Op != OpARM64MOVDconst { 2171 break 2172 } 2173 x := v_0.AuxInt 2174 if !(int64(x) < int64(y) && uint64(x) < uint64(y)) { 2175 break 2176 } 2177 v.reset(OpARM64FlagLT_ULT) 2178 return true 2179 } 2180 // match: (CMPconst (MOVDconst [x]) [y]) 2181 // cond: int64(x)<int64(y) && uint64(x)>uint64(y) 2182 
// result: (FlagLT_UGT) 2183 for { 2184 y := v.AuxInt 2185 v_0 := v.Args[0] 2186 if v_0.Op != OpARM64MOVDconst { 2187 break 2188 } 2189 x := v_0.AuxInt 2190 if !(int64(x) < int64(y) && uint64(x) > uint64(y)) { 2191 break 2192 } 2193 v.reset(OpARM64FlagLT_UGT) 2194 return true 2195 } 2196 // match: (CMPconst (MOVDconst [x]) [y]) 2197 // cond: int64(x)>int64(y) && uint64(x)<uint64(y) 2198 // result: (FlagGT_ULT) 2199 for { 2200 y := v.AuxInt 2201 v_0 := v.Args[0] 2202 if v_0.Op != OpARM64MOVDconst { 2203 break 2204 } 2205 x := v_0.AuxInt 2206 if !(int64(x) > int64(y) && uint64(x) < uint64(y)) { 2207 break 2208 } 2209 v.reset(OpARM64FlagGT_ULT) 2210 return true 2211 } 2212 // match: (CMPconst (MOVDconst [x]) [y]) 2213 // cond: int64(x)>int64(y) && uint64(x)>uint64(y) 2214 // result: (FlagGT_UGT) 2215 for { 2216 y := v.AuxInt 2217 v_0 := v.Args[0] 2218 if v_0.Op != OpARM64MOVDconst { 2219 break 2220 } 2221 x := v_0.AuxInt 2222 if !(int64(x) > int64(y) && uint64(x) > uint64(y)) { 2223 break 2224 } 2225 v.reset(OpARM64FlagGT_UGT) 2226 return true 2227 } 2228 // match: (CMPconst (MOVBUreg _) [c]) 2229 // cond: 0xff < c 2230 // result: (FlagLT_ULT) 2231 for { 2232 c := v.AuxInt 2233 v_0 := v.Args[0] 2234 if v_0.Op != OpARM64MOVBUreg { 2235 break 2236 } 2237 if !(0xff < c) { 2238 break 2239 } 2240 v.reset(OpARM64FlagLT_ULT) 2241 return true 2242 } 2243 // match: (CMPconst (MOVHUreg _) [c]) 2244 // cond: 0xffff < c 2245 // result: (FlagLT_ULT) 2246 for { 2247 c := v.AuxInt 2248 v_0 := v.Args[0] 2249 if v_0.Op != OpARM64MOVHUreg { 2250 break 2251 } 2252 if !(0xffff < c) { 2253 break 2254 } 2255 v.reset(OpARM64FlagLT_ULT) 2256 return true 2257 } 2258 // match: (CMPconst (MOVWUreg _) [c]) 2259 // cond: 0xffffffff < c 2260 // result: (FlagLT_ULT) 2261 for { 2262 c := v.AuxInt 2263 v_0 := v.Args[0] 2264 if v_0.Op != OpARM64MOVWUreg { 2265 break 2266 } 2267 if !(0xffffffff < c) { 2268 break 2269 } 2270 v.reset(OpARM64FlagLT_ULT) 2271 return true 2272 } 2273 // match: (CMPconst 
(ANDconst _ [m]) [n]) 2274 // cond: 0 <= m && m < n 2275 // result: (FlagLT_ULT) 2276 for { 2277 n := v.AuxInt 2278 v_0 := v.Args[0] 2279 if v_0.Op != OpARM64ANDconst { 2280 break 2281 } 2282 m := v_0.AuxInt 2283 if !(0 <= m && m < n) { 2284 break 2285 } 2286 v.reset(OpARM64FlagLT_ULT) 2287 return true 2288 } 2289 // match: (CMPconst (SRLconst _ [c]) [n]) 2290 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 2291 // result: (FlagLT_ULT) 2292 for { 2293 n := v.AuxInt 2294 v_0 := v.Args[0] 2295 if v_0.Op != OpARM64SRLconst { 2296 break 2297 } 2298 c := v_0.AuxInt 2299 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 2300 break 2301 } 2302 v.reset(OpARM64FlagLT_ULT) 2303 return true 2304 } 2305 return false 2306 } 2307 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 2308 b := v.Block 2309 _ = b 2310 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 2311 // cond: 2312 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 2313 for { 2314 d := v.AuxInt 2315 v_0 := v.Args[0] 2316 if v_0.Op != OpARM64MOVDconst { 2317 break 2318 } 2319 c := v_0.AuxInt 2320 x := v.Args[1] 2321 v.reset(OpARM64InvertFlags) 2322 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2323 v0.AuxInt = c 2324 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2325 v1.AuxInt = d 2326 v1.AddArg(x) 2327 v0.AddArg(v1) 2328 v.AddArg(v0) 2329 return true 2330 } 2331 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 2332 // cond: 2333 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 2334 for { 2335 d := v.AuxInt 2336 x := v.Args[0] 2337 v_1 := v.Args[1] 2338 if v_1.Op != OpARM64MOVDconst { 2339 break 2340 } 2341 c := v_1.AuxInt 2342 v.reset(OpARM64CMPconst) 2343 v.AuxInt = int64(uint64(c) << uint64(d)) 2344 v.AddArg(x) 2345 return true 2346 } 2347 return false 2348 } 2349 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 2350 b := v.Block 2351 _ = b 2352 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 2353 // cond: 2354 // result: 
(InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 2355 for { 2356 d := v.AuxInt 2357 v_0 := v.Args[0] 2358 if v_0.Op != OpARM64MOVDconst { 2359 break 2360 } 2361 c := v_0.AuxInt 2362 x := v.Args[1] 2363 v.reset(OpARM64InvertFlags) 2364 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2365 v0.AuxInt = c 2366 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2367 v1.AuxInt = d 2368 v1.AddArg(x) 2369 v0.AddArg(v1) 2370 v.AddArg(v0) 2371 return true 2372 } 2373 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 2374 // cond: 2375 // result: (CMPconst x [int64(int64(c)>>uint64(d))]) 2376 for { 2377 d := v.AuxInt 2378 x := v.Args[0] 2379 v_1 := v.Args[1] 2380 if v_1.Op != OpARM64MOVDconst { 2381 break 2382 } 2383 c := v_1.AuxInt 2384 v.reset(OpARM64CMPconst) 2385 v.AuxInt = int64(int64(c) >> uint64(d)) 2386 v.AddArg(x) 2387 return true 2388 } 2389 return false 2390 } 2391 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 2392 b := v.Block 2393 _ = b 2394 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 2395 // cond: 2396 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 2397 for { 2398 d := v.AuxInt 2399 v_0 := v.Args[0] 2400 if v_0.Op != OpARM64MOVDconst { 2401 break 2402 } 2403 c := v_0.AuxInt 2404 x := v.Args[1] 2405 v.reset(OpARM64InvertFlags) 2406 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 2407 v0.AuxInt = c 2408 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2409 v1.AuxInt = d 2410 v1.AddArg(x) 2411 v0.AddArg(v1) 2412 v.AddArg(v0) 2413 return true 2414 } 2415 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 2416 // cond: 2417 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 2418 for { 2419 d := v.AuxInt 2420 x := v.Args[0] 2421 v_1 := v.Args[1] 2422 if v_1.Op != OpARM64MOVDconst { 2423 break 2424 } 2425 c := v_1.AuxInt 2426 v.reset(OpARM64CMPconst) 2427 v.AuxInt = int64(uint64(c) >> uint64(d)) 2428 v.AddArg(x) 2429 return true 2430 } 2431 return false 2432 } 2433 func rewriteValueARM64_OpARM64CSELULT_0(v *Value) bool { 2434 // 
match: (CSELULT x (MOVDconst [0]) flag) 2435 // cond: 2436 // result: (CSELULT0 x flag) 2437 for { 2438 x := v.Args[0] 2439 v_1 := v.Args[1] 2440 if v_1.Op != OpARM64MOVDconst { 2441 break 2442 } 2443 if v_1.AuxInt != 0 { 2444 break 2445 } 2446 flag := v.Args[2] 2447 v.reset(OpARM64CSELULT0) 2448 v.AddArg(x) 2449 v.AddArg(flag) 2450 return true 2451 } 2452 // match: (CSELULT _ y (FlagEQ)) 2453 // cond: 2454 // result: y 2455 for { 2456 y := v.Args[1] 2457 v_2 := v.Args[2] 2458 if v_2.Op != OpARM64FlagEQ { 2459 break 2460 } 2461 v.reset(OpCopy) 2462 v.Type = y.Type 2463 v.AddArg(y) 2464 return true 2465 } 2466 // match: (CSELULT x _ (FlagLT_ULT)) 2467 // cond: 2468 // result: x 2469 for { 2470 x := v.Args[0] 2471 v_2 := v.Args[2] 2472 if v_2.Op != OpARM64FlagLT_ULT { 2473 break 2474 } 2475 v.reset(OpCopy) 2476 v.Type = x.Type 2477 v.AddArg(x) 2478 return true 2479 } 2480 // match: (CSELULT _ y (FlagLT_UGT)) 2481 // cond: 2482 // result: y 2483 for { 2484 y := v.Args[1] 2485 v_2 := v.Args[2] 2486 if v_2.Op != OpARM64FlagLT_UGT { 2487 break 2488 } 2489 v.reset(OpCopy) 2490 v.Type = y.Type 2491 v.AddArg(y) 2492 return true 2493 } 2494 // match: (CSELULT x _ (FlagGT_ULT)) 2495 // cond: 2496 // result: x 2497 for { 2498 x := v.Args[0] 2499 v_2 := v.Args[2] 2500 if v_2.Op != OpARM64FlagGT_ULT { 2501 break 2502 } 2503 v.reset(OpCopy) 2504 v.Type = x.Type 2505 v.AddArg(x) 2506 return true 2507 } 2508 // match: (CSELULT _ y (FlagGT_UGT)) 2509 // cond: 2510 // result: y 2511 for { 2512 y := v.Args[1] 2513 v_2 := v.Args[2] 2514 if v_2.Op != OpARM64FlagGT_UGT { 2515 break 2516 } 2517 v.reset(OpCopy) 2518 v.Type = y.Type 2519 v.AddArg(y) 2520 return true 2521 } 2522 return false 2523 } 2524 func rewriteValueARM64_OpARM64CSELULT0_0(v *Value) bool { 2525 // match: (CSELULT0 _ (FlagEQ)) 2526 // cond: 2527 // result: (MOVDconst [0]) 2528 for { 2529 v_1 := v.Args[1] 2530 if v_1.Op != OpARM64FlagEQ { 2531 break 2532 } 2533 v.reset(OpARM64MOVDconst) 2534 v.AuxInt = 0 2535 return true 
2536 } 2537 // match: (CSELULT0 x (FlagLT_ULT)) 2538 // cond: 2539 // result: x 2540 for { 2541 x := v.Args[0] 2542 v_1 := v.Args[1] 2543 if v_1.Op != OpARM64FlagLT_ULT { 2544 break 2545 } 2546 v.reset(OpCopy) 2547 v.Type = x.Type 2548 v.AddArg(x) 2549 return true 2550 } 2551 // match: (CSELULT0 _ (FlagLT_UGT)) 2552 // cond: 2553 // result: (MOVDconst [0]) 2554 for { 2555 v_1 := v.Args[1] 2556 if v_1.Op != OpARM64FlagLT_UGT { 2557 break 2558 } 2559 v.reset(OpARM64MOVDconst) 2560 v.AuxInt = 0 2561 return true 2562 } 2563 // match: (CSELULT0 x (FlagGT_ULT)) 2564 // cond: 2565 // result: x 2566 for { 2567 x := v.Args[0] 2568 v_1 := v.Args[1] 2569 if v_1.Op != OpARM64FlagGT_ULT { 2570 break 2571 } 2572 v.reset(OpCopy) 2573 v.Type = x.Type 2574 v.AddArg(x) 2575 return true 2576 } 2577 // match: (CSELULT0 _ (FlagGT_UGT)) 2578 // cond: 2579 // result: (MOVDconst [0]) 2580 for { 2581 v_1 := v.Args[1] 2582 if v_1.Op != OpARM64FlagGT_UGT { 2583 break 2584 } 2585 v.reset(OpARM64MOVDconst) 2586 v.AuxInt = 0 2587 return true 2588 } 2589 return false 2590 } 2591 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 2592 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 2593 // cond: 2594 // result: (MOVDconst [int64(c)/int64(d)]) 2595 for { 2596 v_0 := v.Args[0] 2597 if v_0.Op != OpARM64MOVDconst { 2598 break 2599 } 2600 c := v_0.AuxInt 2601 v_1 := v.Args[1] 2602 if v_1.Op != OpARM64MOVDconst { 2603 break 2604 } 2605 d := v_1.AuxInt 2606 v.reset(OpARM64MOVDconst) 2607 v.AuxInt = int64(c) / int64(d) 2608 return true 2609 } 2610 return false 2611 } 2612 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 2613 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 2614 // cond: 2615 // result: (MOVDconst [int64(int32(c)/int32(d))]) 2616 for { 2617 v_0 := v.Args[0] 2618 if v_0.Op != OpARM64MOVDconst { 2619 break 2620 } 2621 c := v_0.AuxInt 2622 v_1 := v.Args[1] 2623 if v_1.Op != OpARM64MOVDconst { 2624 break 2625 } 2626 d := v_1.AuxInt 2627 v.reset(OpARM64MOVDconst) 2628 v.AuxInt = 
int64(int32(c) / int32(d)) 2629 return true 2630 } 2631 return false 2632 } 2633 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 2634 // match: (Equal (FlagEQ)) 2635 // cond: 2636 // result: (MOVDconst [1]) 2637 for { 2638 v_0 := v.Args[0] 2639 if v_0.Op != OpARM64FlagEQ { 2640 break 2641 } 2642 v.reset(OpARM64MOVDconst) 2643 v.AuxInt = 1 2644 return true 2645 } 2646 // match: (Equal (FlagLT_ULT)) 2647 // cond: 2648 // result: (MOVDconst [0]) 2649 for { 2650 v_0 := v.Args[0] 2651 if v_0.Op != OpARM64FlagLT_ULT { 2652 break 2653 } 2654 v.reset(OpARM64MOVDconst) 2655 v.AuxInt = 0 2656 return true 2657 } 2658 // match: (Equal (FlagLT_UGT)) 2659 // cond: 2660 // result: (MOVDconst [0]) 2661 for { 2662 v_0 := v.Args[0] 2663 if v_0.Op != OpARM64FlagLT_UGT { 2664 break 2665 } 2666 v.reset(OpARM64MOVDconst) 2667 v.AuxInt = 0 2668 return true 2669 } 2670 // match: (Equal (FlagGT_ULT)) 2671 // cond: 2672 // result: (MOVDconst [0]) 2673 for { 2674 v_0 := v.Args[0] 2675 if v_0.Op != OpARM64FlagGT_ULT { 2676 break 2677 } 2678 v.reset(OpARM64MOVDconst) 2679 v.AuxInt = 0 2680 return true 2681 } 2682 // match: (Equal (FlagGT_UGT)) 2683 // cond: 2684 // result: (MOVDconst [0]) 2685 for { 2686 v_0 := v.Args[0] 2687 if v_0.Op != OpARM64FlagGT_UGT { 2688 break 2689 } 2690 v.reset(OpARM64MOVDconst) 2691 v.AuxInt = 0 2692 return true 2693 } 2694 // match: (Equal (InvertFlags x)) 2695 // cond: 2696 // result: (Equal x) 2697 for { 2698 v_0 := v.Args[0] 2699 if v_0.Op != OpARM64InvertFlags { 2700 break 2701 } 2702 x := v_0.Args[0] 2703 v.reset(OpARM64Equal) 2704 v.AddArg(x) 2705 return true 2706 } 2707 return false 2708 } 2709 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 2710 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 2711 // cond: fitsARM64Offset(off1+off2, 8, sym) 2712 // result: (FMOVDload [off1+off2] {sym} ptr mem) 2713 for { 2714 off1 := v.AuxInt 2715 sym := v.Aux 2716 v_0 := v.Args[0] 2717 if v_0.Op != OpARM64ADDconst { 2718 break 2719 } 2720 
off2 := v_0.AuxInt 2721 ptr := v_0.Args[0] 2722 mem := v.Args[1] 2723 if !(fitsARM64Offset(off1+off2, 8, sym)) { 2724 break 2725 } 2726 v.reset(OpARM64FMOVDload) 2727 v.AuxInt = off1 + off2 2728 v.Aux = sym 2729 v.AddArg(ptr) 2730 v.AddArg(mem) 2731 return true 2732 } 2733 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2734 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2)) 2735 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2736 for { 2737 off1 := v.AuxInt 2738 sym1 := v.Aux 2739 v_0 := v.Args[0] 2740 if v_0.Op != OpARM64MOVDaddr { 2741 break 2742 } 2743 off2 := v_0.AuxInt 2744 sym2 := v_0.Aux 2745 ptr := v_0.Args[0] 2746 mem := v.Args[1] 2747 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2))) { 2748 break 2749 } 2750 v.reset(OpARM64FMOVDload) 2751 v.AuxInt = off1 + off2 2752 v.Aux = mergeSym(sym1, sym2) 2753 v.AddArg(ptr) 2754 v.AddArg(mem) 2755 return true 2756 } 2757 return false 2758 } 2759 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 2760 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2761 // cond: fitsARM64Offset(off1+off2, 8, sym) 2762 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 2763 for { 2764 off1 := v.AuxInt 2765 sym := v.Aux 2766 v_0 := v.Args[0] 2767 if v_0.Op != OpARM64ADDconst { 2768 break 2769 } 2770 off2 := v_0.AuxInt 2771 ptr := v_0.Args[0] 2772 val := v.Args[1] 2773 mem := v.Args[2] 2774 if !(fitsARM64Offset(off1+off2, 8, sym)) { 2775 break 2776 } 2777 v.reset(OpARM64FMOVDstore) 2778 v.AuxInt = off1 + off2 2779 v.Aux = sym 2780 v.AddArg(ptr) 2781 v.AddArg(val) 2782 v.AddArg(mem) 2783 return true 2784 } 2785 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2786 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2)) 2787 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2788 for { 2789 off1 := v.AuxInt 2790 sym1 := v.Aux 
2791 v_0 := v.Args[0] 2792 if v_0.Op != OpARM64MOVDaddr { 2793 break 2794 } 2795 off2 := v_0.AuxInt 2796 sym2 := v_0.Aux 2797 ptr := v_0.Args[0] 2798 val := v.Args[1] 2799 mem := v.Args[2] 2800 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2))) { 2801 break 2802 } 2803 v.reset(OpARM64FMOVDstore) 2804 v.AuxInt = off1 + off2 2805 v.Aux = mergeSym(sym1, sym2) 2806 v.AddArg(ptr) 2807 v.AddArg(val) 2808 v.AddArg(mem) 2809 return true 2810 } 2811 return false 2812 } 2813 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 2814 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 2815 // cond: fitsARM64Offset(off1+off2, 4, sym) 2816 // result: (FMOVSload [off1+off2] {sym} ptr mem) 2817 for { 2818 off1 := v.AuxInt 2819 sym := v.Aux 2820 v_0 := v.Args[0] 2821 if v_0.Op != OpARM64ADDconst { 2822 break 2823 } 2824 off2 := v_0.AuxInt 2825 ptr := v_0.Args[0] 2826 mem := v.Args[1] 2827 if !(fitsARM64Offset(off1+off2, 4, sym)) { 2828 break 2829 } 2830 v.reset(OpARM64FMOVSload) 2831 v.AuxInt = off1 + off2 2832 v.Aux = sym 2833 v.AddArg(ptr) 2834 v.AddArg(mem) 2835 return true 2836 } 2837 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2838 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2)) 2839 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2840 for { 2841 off1 := v.AuxInt 2842 sym1 := v.Aux 2843 v_0 := v.Args[0] 2844 if v_0.Op != OpARM64MOVDaddr { 2845 break 2846 } 2847 off2 := v_0.AuxInt 2848 sym2 := v_0.Aux 2849 ptr := v_0.Args[0] 2850 mem := v.Args[1] 2851 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) { 2852 break 2853 } 2854 v.reset(OpARM64FMOVSload) 2855 v.AuxInt = off1 + off2 2856 v.Aux = mergeSym(sym1, sym2) 2857 v.AddArg(ptr) 2858 v.AddArg(mem) 2859 return true 2860 } 2861 return false 2862 } 2863 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 2864 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] 
ptr) val mem) 2865 // cond: fitsARM64Offset(off1+off2, 4, sym) 2866 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 2867 for { 2868 off1 := v.AuxInt 2869 sym := v.Aux 2870 v_0 := v.Args[0] 2871 if v_0.Op != OpARM64ADDconst { 2872 break 2873 } 2874 off2 := v_0.AuxInt 2875 ptr := v_0.Args[0] 2876 val := v.Args[1] 2877 mem := v.Args[2] 2878 if !(fitsARM64Offset(off1+off2, 4, sym)) { 2879 break 2880 } 2881 v.reset(OpARM64FMOVSstore) 2882 v.AuxInt = off1 + off2 2883 v.Aux = sym 2884 v.AddArg(ptr) 2885 v.AddArg(val) 2886 v.AddArg(mem) 2887 return true 2888 } 2889 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2890 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2)) 2891 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2892 for { 2893 off1 := v.AuxInt 2894 sym1 := v.Aux 2895 v_0 := v.Args[0] 2896 if v_0.Op != OpARM64MOVDaddr { 2897 break 2898 } 2899 off2 := v_0.AuxInt 2900 sym2 := v_0.Aux 2901 ptr := v_0.Args[0] 2902 val := v.Args[1] 2903 mem := v.Args[2] 2904 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) { 2905 break 2906 } 2907 v.reset(OpARM64FMOVSstore) 2908 v.AuxInt = off1 + off2 2909 v.Aux = mergeSym(sym1, sym2) 2910 v.AddArg(ptr) 2911 v.AddArg(val) 2912 v.AddArg(mem) 2913 return true 2914 } 2915 return false 2916 } 2917 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 2918 // match: (GreaterEqual (FlagEQ)) 2919 // cond: 2920 // result: (MOVDconst [1]) 2921 for { 2922 v_0 := v.Args[0] 2923 if v_0.Op != OpARM64FlagEQ { 2924 break 2925 } 2926 v.reset(OpARM64MOVDconst) 2927 v.AuxInt = 1 2928 return true 2929 } 2930 // match: (GreaterEqual (FlagLT_ULT)) 2931 // cond: 2932 // result: (MOVDconst [0]) 2933 for { 2934 v_0 := v.Args[0] 2935 if v_0.Op != OpARM64FlagLT_ULT { 2936 break 2937 } 2938 v.reset(OpARM64MOVDconst) 2939 v.AuxInt = 0 2940 return true 2941 } 2942 // match: (GreaterEqual (FlagLT_UGT)) 2943 // cond: 2944 // result: 
(MOVDconst [0]) 2945 for { 2946 v_0 := v.Args[0] 2947 if v_0.Op != OpARM64FlagLT_UGT { 2948 break 2949 } 2950 v.reset(OpARM64MOVDconst) 2951 v.AuxInt = 0 2952 return true 2953 } 2954 // match: (GreaterEqual (FlagGT_ULT)) 2955 // cond: 2956 // result: (MOVDconst [1]) 2957 for { 2958 v_0 := v.Args[0] 2959 if v_0.Op != OpARM64FlagGT_ULT { 2960 break 2961 } 2962 v.reset(OpARM64MOVDconst) 2963 v.AuxInt = 1 2964 return true 2965 } 2966 // match: (GreaterEqual (FlagGT_UGT)) 2967 // cond: 2968 // result: (MOVDconst [1]) 2969 for { 2970 v_0 := v.Args[0] 2971 if v_0.Op != OpARM64FlagGT_UGT { 2972 break 2973 } 2974 v.reset(OpARM64MOVDconst) 2975 v.AuxInt = 1 2976 return true 2977 } 2978 // match: (GreaterEqual (InvertFlags x)) 2979 // cond: 2980 // result: (LessEqual x) 2981 for { 2982 v_0 := v.Args[0] 2983 if v_0.Op != OpARM64InvertFlags { 2984 break 2985 } 2986 x := v_0.Args[0] 2987 v.reset(OpARM64LessEqual) 2988 v.AddArg(x) 2989 return true 2990 } 2991 return false 2992 } 2993 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 2994 // match: (GreaterEqualU (FlagEQ)) 2995 // cond: 2996 // result: (MOVDconst [1]) 2997 for { 2998 v_0 := v.Args[0] 2999 if v_0.Op != OpARM64FlagEQ { 3000 break 3001 } 3002 v.reset(OpARM64MOVDconst) 3003 v.AuxInt = 1 3004 return true 3005 } 3006 // match: (GreaterEqualU (FlagLT_ULT)) 3007 // cond: 3008 // result: (MOVDconst [0]) 3009 for { 3010 v_0 := v.Args[0] 3011 if v_0.Op != OpARM64FlagLT_ULT { 3012 break 3013 } 3014 v.reset(OpARM64MOVDconst) 3015 v.AuxInt = 0 3016 return true 3017 } 3018 // match: (GreaterEqualU (FlagLT_UGT)) 3019 // cond: 3020 // result: (MOVDconst [1]) 3021 for { 3022 v_0 := v.Args[0] 3023 if v_0.Op != OpARM64FlagLT_UGT { 3024 break 3025 } 3026 v.reset(OpARM64MOVDconst) 3027 v.AuxInt = 1 3028 return true 3029 } 3030 // match: (GreaterEqualU (FlagGT_ULT)) 3031 // cond: 3032 // result: (MOVDconst [0]) 3033 for { 3034 v_0 := v.Args[0] 3035 if v_0.Op != OpARM64FlagGT_ULT { 3036 break 3037 } 3038 
v.reset(OpARM64MOVDconst) 3039 v.AuxInt = 0 3040 return true 3041 } 3042 // match: (GreaterEqualU (FlagGT_UGT)) 3043 // cond: 3044 // result: (MOVDconst [1]) 3045 for { 3046 v_0 := v.Args[0] 3047 if v_0.Op != OpARM64FlagGT_UGT { 3048 break 3049 } 3050 v.reset(OpARM64MOVDconst) 3051 v.AuxInt = 1 3052 return true 3053 } 3054 // match: (GreaterEqualU (InvertFlags x)) 3055 // cond: 3056 // result: (LessEqualU x) 3057 for { 3058 v_0 := v.Args[0] 3059 if v_0.Op != OpARM64InvertFlags { 3060 break 3061 } 3062 x := v_0.Args[0] 3063 v.reset(OpARM64LessEqualU) 3064 v.AddArg(x) 3065 return true 3066 } 3067 return false 3068 } 3069 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 3070 // match: (GreaterThan (FlagEQ)) 3071 // cond: 3072 // result: (MOVDconst [0]) 3073 for { 3074 v_0 := v.Args[0] 3075 if v_0.Op != OpARM64FlagEQ { 3076 break 3077 } 3078 v.reset(OpARM64MOVDconst) 3079 v.AuxInt = 0 3080 return true 3081 } 3082 // match: (GreaterThan (FlagLT_ULT)) 3083 // cond: 3084 // result: (MOVDconst [0]) 3085 for { 3086 v_0 := v.Args[0] 3087 if v_0.Op != OpARM64FlagLT_ULT { 3088 break 3089 } 3090 v.reset(OpARM64MOVDconst) 3091 v.AuxInt = 0 3092 return true 3093 } 3094 // match: (GreaterThan (FlagLT_UGT)) 3095 // cond: 3096 // result: (MOVDconst [0]) 3097 for { 3098 v_0 := v.Args[0] 3099 if v_0.Op != OpARM64FlagLT_UGT { 3100 break 3101 } 3102 v.reset(OpARM64MOVDconst) 3103 v.AuxInt = 0 3104 return true 3105 } 3106 // match: (GreaterThan (FlagGT_ULT)) 3107 // cond: 3108 // result: (MOVDconst [1]) 3109 for { 3110 v_0 := v.Args[0] 3111 if v_0.Op != OpARM64FlagGT_ULT { 3112 break 3113 } 3114 v.reset(OpARM64MOVDconst) 3115 v.AuxInt = 1 3116 return true 3117 } 3118 // match: (GreaterThan (FlagGT_UGT)) 3119 // cond: 3120 // result: (MOVDconst [1]) 3121 for { 3122 v_0 := v.Args[0] 3123 if v_0.Op != OpARM64FlagGT_UGT { 3124 break 3125 } 3126 v.reset(OpARM64MOVDconst) 3127 v.AuxInt = 1 3128 return true 3129 } 3130 // match: (GreaterThan (InvertFlags x)) 3131 // cond: 3132 // 
result: (LessThan x) 3133 for { 3134 v_0 := v.Args[0] 3135 if v_0.Op != OpARM64InvertFlags { 3136 break 3137 } 3138 x := v_0.Args[0] 3139 v.reset(OpARM64LessThan) 3140 v.AddArg(x) 3141 return true 3142 } 3143 return false 3144 } 3145 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 3146 // match: (GreaterThanU (FlagEQ)) 3147 // cond: 3148 // result: (MOVDconst [0]) 3149 for { 3150 v_0 := v.Args[0] 3151 if v_0.Op != OpARM64FlagEQ { 3152 break 3153 } 3154 v.reset(OpARM64MOVDconst) 3155 v.AuxInt = 0 3156 return true 3157 } 3158 // match: (GreaterThanU (FlagLT_ULT)) 3159 // cond: 3160 // result: (MOVDconst [0]) 3161 for { 3162 v_0 := v.Args[0] 3163 if v_0.Op != OpARM64FlagLT_ULT { 3164 break 3165 } 3166 v.reset(OpARM64MOVDconst) 3167 v.AuxInt = 0 3168 return true 3169 } 3170 // match: (GreaterThanU (FlagLT_UGT)) 3171 // cond: 3172 // result: (MOVDconst [1]) 3173 for { 3174 v_0 := v.Args[0] 3175 if v_0.Op != OpARM64FlagLT_UGT { 3176 break 3177 } 3178 v.reset(OpARM64MOVDconst) 3179 v.AuxInt = 1 3180 return true 3181 } 3182 // match: (GreaterThanU (FlagGT_ULT)) 3183 // cond: 3184 // result: (MOVDconst [0]) 3185 for { 3186 v_0 := v.Args[0] 3187 if v_0.Op != OpARM64FlagGT_ULT { 3188 break 3189 } 3190 v.reset(OpARM64MOVDconst) 3191 v.AuxInt = 0 3192 return true 3193 } 3194 // match: (GreaterThanU (FlagGT_UGT)) 3195 // cond: 3196 // result: (MOVDconst [1]) 3197 for { 3198 v_0 := v.Args[0] 3199 if v_0.Op != OpARM64FlagGT_UGT { 3200 break 3201 } 3202 v.reset(OpARM64MOVDconst) 3203 v.AuxInt = 1 3204 return true 3205 } 3206 // match: (GreaterThanU (InvertFlags x)) 3207 // cond: 3208 // result: (LessThanU x) 3209 for { 3210 v_0 := v.Args[0] 3211 if v_0.Op != OpARM64InvertFlags { 3212 break 3213 } 3214 x := v_0.Args[0] 3215 v.reset(OpARM64LessThanU) 3216 v.AddArg(x) 3217 return true 3218 } 3219 return false 3220 } 3221 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 3222 // match: (LessEqual (FlagEQ)) 3223 // cond: 3224 // result: (MOVDconst [1]) 3225 for { 
3226 v_0 := v.Args[0] 3227 if v_0.Op != OpARM64FlagEQ { 3228 break 3229 } 3230 v.reset(OpARM64MOVDconst) 3231 v.AuxInt = 1 3232 return true 3233 } 3234 // match: (LessEqual (FlagLT_ULT)) 3235 // cond: 3236 // result: (MOVDconst [1]) 3237 for { 3238 v_0 := v.Args[0] 3239 if v_0.Op != OpARM64FlagLT_ULT { 3240 break 3241 } 3242 v.reset(OpARM64MOVDconst) 3243 v.AuxInt = 1 3244 return true 3245 } 3246 // match: (LessEqual (FlagLT_UGT)) 3247 // cond: 3248 // result: (MOVDconst [1]) 3249 for { 3250 v_0 := v.Args[0] 3251 if v_0.Op != OpARM64FlagLT_UGT { 3252 break 3253 } 3254 v.reset(OpARM64MOVDconst) 3255 v.AuxInt = 1 3256 return true 3257 } 3258 // match: (LessEqual (FlagGT_ULT)) 3259 // cond: 3260 // result: (MOVDconst [0]) 3261 for { 3262 v_0 := v.Args[0] 3263 if v_0.Op != OpARM64FlagGT_ULT { 3264 break 3265 } 3266 v.reset(OpARM64MOVDconst) 3267 v.AuxInt = 0 3268 return true 3269 } 3270 // match: (LessEqual (FlagGT_UGT)) 3271 // cond: 3272 // result: (MOVDconst [0]) 3273 for { 3274 v_0 := v.Args[0] 3275 if v_0.Op != OpARM64FlagGT_UGT { 3276 break 3277 } 3278 v.reset(OpARM64MOVDconst) 3279 v.AuxInt = 0 3280 return true 3281 } 3282 // match: (LessEqual (InvertFlags x)) 3283 // cond: 3284 // result: (GreaterEqual x) 3285 for { 3286 v_0 := v.Args[0] 3287 if v_0.Op != OpARM64InvertFlags { 3288 break 3289 } 3290 x := v_0.Args[0] 3291 v.reset(OpARM64GreaterEqual) 3292 v.AddArg(x) 3293 return true 3294 } 3295 return false 3296 } 3297 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool { 3298 // match: (LessEqualU (FlagEQ)) 3299 // cond: 3300 // result: (MOVDconst [1]) 3301 for { 3302 v_0 := v.Args[0] 3303 if v_0.Op != OpARM64FlagEQ { 3304 break 3305 } 3306 v.reset(OpARM64MOVDconst) 3307 v.AuxInt = 1 3308 return true 3309 } 3310 // match: (LessEqualU (FlagLT_ULT)) 3311 // cond: 3312 // result: (MOVDconst [1]) 3313 for { 3314 v_0 := v.Args[0] 3315 if v_0.Op != OpARM64FlagLT_ULT { 3316 break 3317 } 3318 v.reset(OpARM64MOVDconst) 3319 v.AuxInt = 1 3320 return true 3321 } 
3322 // match: (LessEqualU (FlagLT_UGT)) 3323 // cond: 3324 // result: (MOVDconst [0]) 3325 for { 3326 v_0 := v.Args[0] 3327 if v_0.Op != OpARM64FlagLT_UGT { 3328 break 3329 } 3330 v.reset(OpARM64MOVDconst) 3331 v.AuxInt = 0 3332 return true 3333 } 3334 // match: (LessEqualU (FlagGT_ULT)) 3335 // cond: 3336 // result: (MOVDconst [1]) 3337 for { 3338 v_0 := v.Args[0] 3339 if v_0.Op != OpARM64FlagGT_ULT { 3340 break 3341 } 3342 v.reset(OpARM64MOVDconst) 3343 v.AuxInt = 1 3344 return true 3345 } 3346 // match: (LessEqualU (FlagGT_UGT)) 3347 // cond: 3348 // result: (MOVDconst [0]) 3349 for { 3350 v_0 := v.Args[0] 3351 if v_0.Op != OpARM64FlagGT_UGT { 3352 break 3353 } 3354 v.reset(OpARM64MOVDconst) 3355 v.AuxInt = 0 3356 return true 3357 } 3358 // match: (LessEqualU (InvertFlags x)) 3359 // cond: 3360 // result: (GreaterEqualU x) 3361 for { 3362 v_0 := v.Args[0] 3363 if v_0.Op != OpARM64InvertFlags { 3364 break 3365 } 3366 x := v_0.Args[0] 3367 v.reset(OpARM64GreaterEqualU) 3368 v.AddArg(x) 3369 return true 3370 } 3371 return false 3372 } 3373 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool { 3374 // match: (LessThan (FlagEQ)) 3375 // cond: 3376 // result: (MOVDconst [0]) 3377 for { 3378 v_0 := v.Args[0] 3379 if v_0.Op != OpARM64FlagEQ { 3380 break 3381 } 3382 v.reset(OpARM64MOVDconst) 3383 v.AuxInt = 0 3384 return true 3385 } 3386 // match: (LessThan (FlagLT_ULT)) 3387 // cond: 3388 // result: (MOVDconst [1]) 3389 for { 3390 v_0 := v.Args[0] 3391 if v_0.Op != OpARM64FlagLT_ULT { 3392 break 3393 } 3394 v.reset(OpARM64MOVDconst) 3395 v.AuxInt = 1 3396 return true 3397 } 3398 // match: (LessThan (FlagLT_UGT)) 3399 // cond: 3400 // result: (MOVDconst [1]) 3401 for { 3402 v_0 := v.Args[0] 3403 if v_0.Op != OpARM64FlagLT_UGT { 3404 break 3405 } 3406 v.reset(OpARM64MOVDconst) 3407 v.AuxInt = 1 3408 return true 3409 } 3410 // match: (LessThan (FlagGT_ULT)) 3411 // cond: 3412 // result: (MOVDconst [0]) 3413 for { 3414 v_0 := v.Args[0] 3415 if v_0.Op != 
OpARM64FlagGT_ULT { 3416 break 3417 } 3418 v.reset(OpARM64MOVDconst) 3419 v.AuxInt = 0 3420 return true 3421 } 3422 // match: (LessThan (FlagGT_UGT)) 3423 // cond: 3424 // result: (MOVDconst [0]) 3425 for { 3426 v_0 := v.Args[0] 3427 if v_0.Op != OpARM64FlagGT_UGT { 3428 break 3429 } 3430 v.reset(OpARM64MOVDconst) 3431 v.AuxInt = 0 3432 return true 3433 } 3434 // match: (LessThan (InvertFlags x)) 3435 // cond: 3436 // result: (GreaterThan x) 3437 for { 3438 v_0 := v.Args[0] 3439 if v_0.Op != OpARM64InvertFlags { 3440 break 3441 } 3442 x := v_0.Args[0] 3443 v.reset(OpARM64GreaterThan) 3444 v.AddArg(x) 3445 return true 3446 } 3447 return false 3448 } 3449 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool { 3450 // match: (LessThanU (FlagEQ)) 3451 // cond: 3452 // result: (MOVDconst [0]) 3453 for { 3454 v_0 := v.Args[0] 3455 if v_0.Op != OpARM64FlagEQ { 3456 break 3457 } 3458 v.reset(OpARM64MOVDconst) 3459 v.AuxInt = 0 3460 return true 3461 } 3462 // match: (LessThanU (FlagLT_ULT)) 3463 // cond: 3464 // result: (MOVDconst [1]) 3465 for { 3466 v_0 := v.Args[0] 3467 if v_0.Op != OpARM64FlagLT_ULT { 3468 break 3469 } 3470 v.reset(OpARM64MOVDconst) 3471 v.AuxInt = 1 3472 return true 3473 } 3474 // match: (LessThanU (FlagLT_UGT)) 3475 // cond: 3476 // result: (MOVDconst [0]) 3477 for { 3478 v_0 := v.Args[0] 3479 if v_0.Op != OpARM64FlagLT_UGT { 3480 break 3481 } 3482 v.reset(OpARM64MOVDconst) 3483 v.AuxInt = 0 3484 return true 3485 } 3486 // match: (LessThanU (FlagGT_ULT)) 3487 // cond: 3488 // result: (MOVDconst [1]) 3489 for { 3490 v_0 := v.Args[0] 3491 if v_0.Op != OpARM64FlagGT_ULT { 3492 break 3493 } 3494 v.reset(OpARM64MOVDconst) 3495 v.AuxInt = 1 3496 return true 3497 } 3498 // match: (LessThanU (FlagGT_UGT)) 3499 // cond: 3500 // result: (MOVDconst [0]) 3501 for { 3502 v_0 := v.Args[0] 3503 if v_0.Op != OpARM64FlagGT_UGT { 3504 break 3505 } 3506 v.reset(OpARM64MOVDconst) 3507 v.AuxInt = 0 3508 return true 3509 } 3510 // match: (LessThanU (InvertFlags x)) 
3511 // cond: 3512 // result: (GreaterThanU x) 3513 for { 3514 v_0 := v.Args[0] 3515 if v_0.Op != OpARM64InvertFlags { 3516 break 3517 } 3518 x := v_0.Args[0] 3519 v.reset(OpARM64GreaterThanU) 3520 v.AddArg(x) 3521 return true 3522 } 3523 return false 3524 } 3525 func rewriteValueARM64_OpARM64MOD_0(v *Value) bool { 3526 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 3527 // cond: 3528 // result: (MOVDconst [int64(c)%int64(d)]) 3529 for { 3530 v_0 := v.Args[0] 3531 if v_0.Op != OpARM64MOVDconst { 3532 break 3533 } 3534 c := v_0.AuxInt 3535 v_1 := v.Args[1] 3536 if v_1.Op != OpARM64MOVDconst { 3537 break 3538 } 3539 d := v_1.AuxInt 3540 v.reset(OpARM64MOVDconst) 3541 v.AuxInt = int64(c) % int64(d) 3542 return true 3543 } 3544 return false 3545 } 3546 func rewriteValueARM64_OpARM64MODW_0(v *Value) bool { 3547 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 3548 // cond: 3549 // result: (MOVDconst [int64(int32(c)%int32(d))]) 3550 for { 3551 v_0 := v.Args[0] 3552 if v_0.Op != OpARM64MOVDconst { 3553 break 3554 } 3555 c := v_0.AuxInt 3556 v_1 := v.Args[1] 3557 if v_1.Op != OpARM64MOVDconst { 3558 break 3559 } 3560 d := v_1.AuxInt 3561 v.reset(OpARM64MOVDconst) 3562 v.AuxInt = int64(int32(c) % int32(d)) 3563 return true 3564 } 3565 return false 3566 } 3567 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool { 3568 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 3569 // cond: fitsARM64Offset(off1+off2, 1, sym) 3570 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3571 for { 3572 off1 := v.AuxInt 3573 sym := v.Aux 3574 v_0 := v.Args[0] 3575 if v_0.Op != OpARM64ADDconst { 3576 break 3577 } 3578 off2 := v_0.AuxInt 3579 ptr := v_0.Args[0] 3580 mem := v.Args[1] 3581 if !(fitsARM64Offset(off1+off2, 1, sym)) { 3582 break 3583 } 3584 v.reset(OpARM64MOVBUload) 3585 v.AuxInt = off1 + off2 3586 v.Aux = sym 3587 v.AddArg(ptr) 3588 v.AddArg(mem) 3589 return true 3590 } 3591 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3592 // cond: 
canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2)) 3593 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3594 for { 3595 off1 := v.AuxInt 3596 sym1 := v.Aux 3597 v_0 := v.Args[0] 3598 if v_0.Op != OpARM64MOVDaddr { 3599 break 3600 } 3601 off2 := v_0.AuxInt 3602 sym2 := v_0.Aux 3603 ptr := v_0.Args[0] 3604 mem := v.Args[1] 3605 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2))) { 3606 break 3607 } 3608 v.reset(OpARM64MOVBUload) 3609 v.AuxInt = off1 + off2 3610 v.Aux = mergeSym(sym1, sym2) 3611 v.AddArg(ptr) 3612 v.AddArg(mem) 3613 return true 3614 } 3615 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3616 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3617 // result: (MOVDconst [0]) 3618 for { 3619 off := v.AuxInt 3620 sym := v.Aux 3621 ptr := v.Args[0] 3622 v_1 := v.Args[1] 3623 if v_1.Op != OpARM64MOVBstorezero { 3624 break 3625 } 3626 off2 := v_1.AuxInt 3627 sym2 := v_1.Aux 3628 ptr2 := v_1.Args[0] 3629 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3630 break 3631 } 3632 v.reset(OpARM64MOVDconst) 3633 v.AuxInt = 0 3634 return true 3635 } 3636 return false 3637 } 3638 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool { 3639 // match: (MOVBUreg x:(MOVBUload _ _)) 3640 // cond: 3641 // result: (MOVDreg x) 3642 for { 3643 x := v.Args[0] 3644 if x.Op != OpARM64MOVBUload { 3645 break 3646 } 3647 v.reset(OpARM64MOVDreg) 3648 v.AddArg(x) 3649 return true 3650 } 3651 // match: (MOVBUreg x:(MOVBUreg _)) 3652 // cond: 3653 // result: (MOVDreg x) 3654 for { 3655 x := v.Args[0] 3656 if x.Op != OpARM64MOVBUreg { 3657 break 3658 } 3659 v.reset(OpARM64MOVDreg) 3660 v.AddArg(x) 3661 return true 3662 } 3663 // match: (MOVBUreg (MOVDconst [c])) 3664 // cond: 3665 // result: (MOVDconst [int64(uint8(c))]) 3666 for { 3667 v_0 := v.Args[0] 3668 if v_0.Op != OpARM64MOVDconst { 3669 break 3670 } 3671 c := v_0.AuxInt 3672 v.reset(OpARM64MOVDconst) 3673 
v.AuxInt = int64(uint8(c)) 3674 return true 3675 } 3676 return false 3677 } 3678 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool { 3679 // match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem) 3680 // cond: fitsARM64Offset(off1+off2, 1, sym) 3681 // result: (MOVBload [off1+off2] {sym} ptr mem) 3682 for { 3683 off1 := v.AuxInt 3684 sym := v.Aux 3685 v_0 := v.Args[0] 3686 if v_0.Op != OpARM64ADDconst { 3687 break 3688 } 3689 off2 := v_0.AuxInt 3690 ptr := v_0.Args[0] 3691 mem := v.Args[1] 3692 if !(fitsARM64Offset(off1+off2, 1, sym)) { 3693 break 3694 } 3695 v.reset(OpARM64MOVBload) 3696 v.AuxInt = off1 + off2 3697 v.Aux = sym 3698 v.AddArg(ptr) 3699 v.AddArg(mem) 3700 return true 3701 } 3702 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3703 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2)) 3704 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3705 for { 3706 off1 := v.AuxInt 3707 sym1 := v.Aux 3708 v_0 := v.Args[0] 3709 if v_0.Op != OpARM64MOVDaddr { 3710 break 3711 } 3712 off2 := v_0.AuxInt 3713 sym2 := v_0.Aux 3714 ptr := v_0.Args[0] 3715 mem := v.Args[1] 3716 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2))) { 3717 break 3718 } 3719 v.reset(OpARM64MOVBload) 3720 v.AuxInt = off1 + off2 3721 v.Aux = mergeSym(sym1, sym2) 3722 v.AddArg(ptr) 3723 v.AddArg(mem) 3724 return true 3725 } 3726 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3727 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3728 // result: (MOVDconst [0]) 3729 for { 3730 off := v.AuxInt 3731 sym := v.Aux 3732 ptr := v.Args[0] 3733 v_1 := v.Args[1] 3734 if v_1.Op != OpARM64MOVBstorezero { 3735 break 3736 } 3737 off2 := v_1.AuxInt 3738 sym2 := v_1.Aux 3739 ptr2 := v_1.Args[0] 3740 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3741 break 3742 } 3743 v.reset(OpARM64MOVDconst) 3744 v.AuxInt = 0 3745 return true 3746 } 3747 return false 3748 
} 3749 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool { 3750 // match: (MOVBreg x:(MOVBload _ _)) 3751 // cond: 3752 // result: (MOVDreg x) 3753 for { 3754 x := v.Args[0] 3755 if x.Op != OpARM64MOVBload { 3756 break 3757 } 3758 v.reset(OpARM64MOVDreg) 3759 v.AddArg(x) 3760 return true 3761 } 3762 // match: (MOVBreg x:(MOVBreg _)) 3763 // cond: 3764 // result: (MOVDreg x) 3765 for { 3766 x := v.Args[0] 3767 if x.Op != OpARM64MOVBreg { 3768 break 3769 } 3770 v.reset(OpARM64MOVDreg) 3771 v.AddArg(x) 3772 return true 3773 } 3774 // match: (MOVBreg (MOVDconst [c])) 3775 // cond: 3776 // result: (MOVDconst [int64(int8(c))]) 3777 for { 3778 v_0 := v.Args[0] 3779 if v_0.Op != OpARM64MOVDconst { 3780 break 3781 } 3782 c := v_0.AuxInt 3783 v.reset(OpARM64MOVDconst) 3784 v.AuxInt = int64(int8(c)) 3785 return true 3786 } 3787 return false 3788 } 3789 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool { 3790 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3791 // cond: fitsARM64Offset(off1+off2, 1, sym) 3792 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3793 for { 3794 off1 := v.AuxInt 3795 sym := v.Aux 3796 v_0 := v.Args[0] 3797 if v_0.Op != OpARM64ADDconst { 3798 break 3799 } 3800 off2 := v_0.AuxInt 3801 ptr := v_0.Args[0] 3802 val := v.Args[1] 3803 mem := v.Args[2] 3804 if !(fitsARM64Offset(off1+off2, 1, sym)) { 3805 break 3806 } 3807 v.reset(OpARM64MOVBstore) 3808 v.AuxInt = off1 + off2 3809 v.Aux = sym 3810 v.AddArg(ptr) 3811 v.AddArg(val) 3812 v.AddArg(mem) 3813 return true 3814 } 3815 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3816 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2)) 3817 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3818 for { 3819 off1 := v.AuxInt 3820 sym1 := v.Aux 3821 v_0 := v.Args[0] 3822 if v_0.Op != OpARM64MOVDaddr { 3823 break 3824 } 3825 off2 := v_0.AuxInt 3826 sym2 := v_0.Aux 3827 ptr := v_0.Args[0] 3828 val := v.Args[1] 
3829 mem := v.Args[2] 3830 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2))) { 3831 break 3832 } 3833 v.reset(OpARM64MOVBstore) 3834 v.AuxInt = off1 + off2 3835 v.Aux = mergeSym(sym1, sym2) 3836 v.AddArg(ptr) 3837 v.AddArg(val) 3838 v.AddArg(mem) 3839 return true 3840 } 3841 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 3842 // cond: 3843 // result: (MOVBstorezero [off] {sym} ptr mem) 3844 for { 3845 off := v.AuxInt 3846 sym := v.Aux 3847 ptr := v.Args[0] 3848 v_1 := v.Args[1] 3849 if v_1.Op != OpARM64MOVDconst { 3850 break 3851 } 3852 if v_1.AuxInt != 0 { 3853 break 3854 } 3855 mem := v.Args[2] 3856 v.reset(OpARM64MOVBstorezero) 3857 v.AuxInt = off 3858 v.Aux = sym 3859 v.AddArg(ptr) 3860 v.AddArg(mem) 3861 return true 3862 } 3863 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 3864 // cond: 3865 // result: (MOVBstore [off] {sym} ptr x mem) 3866 for { 3867 off := v.AuxInt 3868 sym := v.Aux 3869 ptr := v.Args[0] 3870 v_1 := v.Args[1] 3871 if v_1.Op != OpARM64MOVBreg { 3872 break 3873 } 3874 x := v_1.Args[0] 3875 mem := v.Args[2] 3876 v.reset(OpARM64MOVBstore) 3877 v.AuxInt = off 3878 v.Aux = sym 3879 v.AddArg(ptr) 3880 v.AddArg(x) 3881 v.AddArg(mem) 3882 return true 3883 } 3884 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 3885 // cond: 3886 // result: (MOVBstore [off] {sym} ptr x mem) 3887 for { 3888 off := v.AuxInt 3889 sym := v.Aux 3890 ptr := v.Args[0] 3891 v_1 := v.Args[1] 3892 if v_1.Op != OpARM64MOVBUreg { 3893 break 3894 } 3895 x := v_1.Args[0] 3896 mem := v.Args[2] 3897 v.reset(OpARM64MOVBstore) 3898 v.AuxInt = off 3899 v.Aux = sym 3900 v.AddArg(ptr) 3901 v.AddArg(x) 3902 v.AddArg(mem) 3903 return true 3904 } 3905 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 3906 // cond: 3907 // result: (MOVBstore [off] {sym} ptr x mem) 3908 for { 3909 off := v.AuxInt 3910 sym := v.Aux 3911 ptr := v.Args[0] 3912 v_1 := v.Args[1] 3913 if v_1.Op != OpARM64MOVHreg { 3914 break 3915 } 3916 x := v_1.Args[0] 
3917 mem := v.Args[2] 3918 v.reset(OpARM64MOVBstore) 3919 v.AuxInt = off 3920 v.Aux = sym 3921 v.AddArg(ptr) 3922 v.AddArg(x) 3923 v.AddArg(mem) 3924 return true 3925 } 3926 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 3927 // cond: 3928 // result: (MOVBstore [off] {sym} ptr x mem) 3929 for { 3930 off := v.AuxInt 3931 sym := v.Aux 3932 ptr := v.Args[0] 3933 v_1 := v.Args[1] 3934 if v_1.Op != OpARM64MOVHUreg { 3935 break 3936 } 3937 x := v_1.Args[0] 3938 mem := v.Args[2] 3939 v.reset(OpARM64MOVBstore) 3940 v.AuxInt = off 3941 v.Aux = sym 3942 v.AddArg(ptr) 3943 v.AddArg(x) 3944 v.AddArg(mem) 3945 return true 3946 } 3947 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 3948 // cond: 3949 // result: (MOVBstore [off] {sym} ptr x mem) 3950 for { 3951 off := v.AuxInt 3952 sym := v.Aux 3953 ptr := v.Args[0] 3954 v_1 := v.Args[1] 3955 if v_1.Op != OpARM64MOVWreg { 3956 break 3957 } 3958 x := v_1.Args[0] 3959 mem := v.Args[2] 3960 v.reset(OpARM64MOVBstore) 3961 v.AuxInt = off 3962 v.Aux = sym 3963 v.AddArg(ptr) 3964 v.AddArg(x) 3965 v.AddArg(mem) 3966 return true 3967 } 3968 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 3969 // cond: 3970 // result: (MOVBstore [off] {sym} ptr x mem) 3971 for { 3972 off := v.AuxInt 3973 sym := v.Aux 3974 ptr := v.Args[0] 3975 v_1 := v.Args[1] 3976 if v_1.Op != OpARM64MOVWUreg { 3977 break 3978 } 3979 x := v_1.Args[0] 3980 mem := v.Args[2] 3981 v.reset(OpARM64MOVBstore) 3982 v.AuxInt = off 3983 v.Aux = sym 3984 v.AddArg(ptr) 3985 v.AddArg(x) 3986 v.AddArg(mem) 3987 return true 3988 } 3989 return false 3990 } 3991 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 3992 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 3993 // cond: fitsARM64Offset(off1+off2, 1, sym) 3994 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 3995 for { 3996 off1 := v.AuxInt 3997 sym := v.Aux 3998 v_0 := v.Args[0] 3999 if v_0.Op != OpARM64ADDconst { 4000 break 4001 } 4002 off2 := v_0.AuxInt 4003 ptr := 
v_0.Args[0] 4004 mem := v.Args[1] 4005 if !(fitsARM64Offset(off1+off2, 1, sym)) { 4006 break 4007 } 4008 v.reset(OpARM64MOVBstorezero) 4009 v.AuxInt = off1 + off2 4010 v.Aux = sym 4011 v.AddArg(ptr) 4012 v.AddArg(mem) 4013 return true 4014 } 4015 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4016 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2)) 4017 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4018 for { 4019 off1 := v.AuxInt 4020 sym1 := v.Aux 4021 v_0 := v.Args[0] 4022 if v_0.Op != OpARM64MOVDaddr { 4023 break 4024 } 4025 off2 := v_0.AuxInt 4026 sym2 := v_0.Aux 4027 ptr := v_0.Args[0] 4028 mem := v.Args[1] 4029 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 1, mergeSym(sym1, sym2))) { 4030 break 4031 } 4032 v.reset(OpARM64MOVBstorezero) 4033 v.AuxInt = off1 + off2 4034 v.Aux = mergeSym(sym1, sym2) 4035 v.AddArg(ptr) 4036 v.AddArg(mem) 4037 return true 4038 } 4039 return false 4040 } 4041 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool { 4042 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 4043 // cond: fitsARM64Offset(off1+off2, 8, sym) 4044 // result: (MOVDload [off1+off2] {sym} ptr mem) 4045 for { 4046 off1 := v.AuxInt 4047 sym := v.Aux 4048 v_0 := v.Args[0] 4049 if v_0.Op != OpARM64ADDconst { 4050 break 4051 } 4052 off2 := v_0.AuxInt 4053 ptr := v_0.Args[0] 4054 mem := v.Args[1] 4055 if !(fitsARM64Offset(off1+off2, 8, sym)) { 4056 break 4057 } 4058 v.reset(OpARM64MOVDload) 4059 v.AuxInt = off1 + off2 4060 v.Aux = sym 4061 v.AddArg(ptr) 4062 v.AddArg(mem) 4063 return true 4064 } 4065 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4066 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2)) 4067 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4068 for { 4069 off1 := v.AuxInt 4070 sym1 := v.Aux 4071 v_0 := v.Args[0] 4072 if v_0.Op != OpARM64MOVDaddr { 4073 break 4074 } 4075 
off2 := v_0.AuxInt 4076 sym2 := v_0.Aux 4077 ptr := v_0.Args[0] 4078 mem := v.Args[1] 4079 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2))) { 4080 break 4081 } 4082 v.reset(OpARM64MOVDload) 4083 v.AuxInt = off1 + off2 4084 v.Aux = mergeSym(sym1, sym2) 4085 v.AddArg(ptr) 4086 v.AddArg(mem) 4087 return true 4088 } 4089 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 4090 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4091 // result: (MOVDconst [0]) 4092 for { 4093 off := v.AuxInt 4094 sym := v.Aux 4095 ptr := v.Args[0] 4096 v_1 := v.Args[1] 4097 if v_1.Op != OpARM64MOVDstorezero { 4098 break 4099 } 4100 off2 := v_1.AuxInt 4101 sym2 := v_1.Aux 4102 ptr2 := v_1.Args[0] 4103 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4104 break 4105 } 4106 v.reset(OpARM64MOVDconst) 4107 v.AuxInt = 0 4108 return true 4109 } 4110 return false 4111 } 4112 func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool { 4113 // match: (MOVDreg x) 4114 // cond: x.Uses == 1 4115 // result: (MOVDnop x) 4116 for { 4117 x := v.Args[0] 4118 if !(x.Uses == 1) { 4119 break 4120 } 4121 v.reset(OpARM64MOVDnop) 4122 v.AddArg(x) 4123 return true 4124 } 4125 // match: (MOVDreg (MOVDconst [c])) 4126 // cond: 4127 // result: (MOVDconst [c]) 4128 for { 4129 v_0 := v.Args[0] 4130 if v_0.Op != OpARM64MOVDconst { 4131 break 4132 } 4133 c := v_0.AuxInt 4134 v.reset(OpARM64MOVDconst) 4135 v.AuxInt = c 4136 return true 4137 } 4138 return false 4139 } 4140 func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool { 4141 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4142 // cond: fitsARM64Offset(off1+off2, 8, sym) 4143 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4144 for { 4145 off1 := v.AuxInt 4146 sym := v.Aux 4147 v_0 := v.Args[0] 4148 if v_0.Op != OpARM64ADDconst { 4149 break 4150 } 4151 off2 := v_0.AuxInt 4152 ptr := v_0.Args[0] 4153 val := v.Args[1] 4154 mem := v.Args[2] 4155 if 
!(fitsARM64Offset(off1+off2, 8, sym)) { 4156 break 4157 } 4158 v.reset(OpARM64MOVDstore) 4159 v.AuxInt = off1 + off2 4160 v.Aux = sym 4161 v.AddArg(ptr) 4162 v.AddArg(val) 4163 v.AddArg(mem) 4164 return true 4165 } 4166 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4167 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2)) 4168 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4169 for { 4170 off1 := v.AuxInt 4171 sym1 := v.Aux 4172 v_0 := v.Args[0] 4173 if v_0.Op != OpARM64MOVDaddr { 4174 break 4175 } 4176 off2 := v_0.AuxInt 4177 sym2 := v_0.Aux 4178 ptr := v_0.Args[0] 4179 val := v.Args[1] 4180 mem := v.Args[2] 4181 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2))) { 4182 break 4183 } 4184 v.reset(OpARM64MOVDstore) 4185 v.AuxInt = off1 + off2 4186 v.Aux = mergeSym(sym1, sym2) 4187 v.AddArg(ptr) 4188 v.AddArg(val) 4189 v.AddArg(mem) 4190 return true 4191 } 4192 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 4193 // cond: 4194 // result: (MOVDstorezero [off] {sym} ptr mem) 4195 for { 4196 off := v.AuxInt 4197 sym := v.Aux 4198 ptr := v.Args[0] 4199 v_1 := v.Args[1] 4200 if v_1.Op != OpARM64MOVDconst { 4201 break 4202 } 4203 if v_1.AuxInt != 0 { 4204 break 4205 } 4206 mem := v.Args[2] 4207 v.reset(OpARM64MOVDstorezero) 4208 v.AuxInt = off 4209 v.Aux = sym 4210 v.AddArg(ptr) 4211 v.AddArg(mem) 4212 return true 4213 } 4214 return false 4215 } 4216 func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool { 4217 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4218 // cond: fitsARM64Offset(off1+off2, 8, sym) 4219 // result: (MOVDstorezero [off1+off2] {sym} ptr mem) 4220 for { 4221 off1 := v.AuxInt 4222 sym := v.Aux 4223 v_0 := v.Args[0] 4224 if v_0.Op != OpARM64ADDconst { 4225 break 4226 } 4227 off2 := v_0.AuxInt 4228 ptr := v_0.Args[0] 4229 mem := v.Args[1] 4230 if !(fitsARM64Offset(off1+off2, 8, sym)) { 4231 break 4232 } 4233 
v.reset(OpARM64MOVDstorezero) 4234 v.AuxInt = off1 + off2 4235 v.Aux = sym 4236 v.AddArg(ptr) 4237 v.AddArg(mem) 4238 return true 4239 } 4240 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4241 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2)) 4242 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4243 for { 4244 off1 := v.AuxInt 4245 sym1 := v.Aux 4246 v_0 := v.Args[0] 4247 if v_0.Op != OpARM64MOVDaddr { 4248 break 4249 } 4250 off2 := v_0.AuxInt 4251 sym2 := v_0.Aux 4252 ptr := v_0.Args[0] 4253 mem := v.Args[1] 4254 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 8, mergeSym(sym1, sym2))) { 4255 break 4256 } 4257 v.reset(OpARM64MOVDstorezero) 4258 v.AuxInt = off1 + off2 4259 v.Aux = mergeSym(sym1, sym2) 4260 v.AddArg(ptr) 4261 v.AddArg(mem) 4262 return true 4263 } 4264 return false 4265 } 4266 func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool { 4267 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 4268 // cond: fitsARM64Offset(off1+off2, 2, sym) 4269 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4270 for { 4271 off1 := v.AuxInt 4272 sym := v.Aux 4273 v_0 := v.Args[0] 4274 if v_0.Op != OpARM64ADDconst { 4275 break 4276 } 4277 off2 := v_0.AuxInt 4278 ptr := v_0.Args[0] 4279 mem := v.Args[1] 4280 if !(fitsARM64Offset(off1+off2, 2, sym)) { 4281 break 4282 } 4283 v.reset(OpARM64MOVHUload) 4284 v.AuxInt = off1 + off2 4285 v.Aux = sym 4286 v.AddArg(ptr) 4287 v.AddArg(mem) 4288 return true 4289 } 4290 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4291 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2)) 4292 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4293 for { 4294 off1 := v.AuxInt 4295 sym1 := v.Aux 4296 v_0 := v.Args[0] 4297 if v_0.Op != OpARM64MOVDaddr { 4298 break 4299 } 4300 off2 := v_0.AuxInt 4301 sym2 := v_0.Aux 4302 ptr := v_0.Args[0] 4303 mem := v.Args[1] 4304 if 
!(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2))) { 4305 break 4306 } 4307 v.reset(OpARM64MOVHUload) 4308 v.AuxInt = off1 + off2 4309 v.Aux = mergeSym(sym1, sym2) 4310 v.AddArg(ptr) 4311 v.AddArg(mem) 4312 return true 4313 } 4314 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4315 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4316 // result: (MOVDconst [0]) 4317 for { 4318 off := v.AuxInt 4319 sym := v.Aux 4320 ptr := v.Args[0] 4321 v_1 := v.Args[1] 4322 if v_1.Op != OpARM64MOVHstorezero { 4323 break 4324 } 4325 off2 := v_1.AuxInt 4326 sym2 := v_1.Aux 4327 ptr2 := v_1.Args[0] 4328 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4329 break 4330 } 4331 v.reset(OpARM64MOVDconst) 4332 v.AuxInt = 0 4333 return true 4334 } 4335 return false 4336 } 4337 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool { 4338 // match: (MOVHUreg x:(MOVBUload _ _)) 4339 // cond: 4340 // result: (MOVDreg x) 4341 for { 4342 x := v.Args[0] 4343 if x.Op != OpARM64MOVBUload { 4344 break 4345 } 4346 v.reset(OpARM64MOVDreg) 4347 v.AddArg(x) 4348 return true 4349 } 4350 // match: (MOVHUreg x:(MOVHUload _ _)) 4351 // cond: 4352 // result: (MOVDreg x) 4353 for { 4354 x := v.Args[0] 4355 if x.Op != OpARM64MOVHUload { 4356 break 4357 } 4358 v.reset(OpARM64MOVDreg) 4359 v.AddArg(x) 4360 return true 4361 } 4362 // match: (MOVHUreg x:(MOVBUreg _)) 4363 // cond: 4364 // result: (MOVDreg x) 4365 for { 4366 x := v.Args[0] 4367 if x.Op != OpARM64MOVBUreg { 4368 break 4369 } 4370 v.reset(OpARM64MOVDreg) 4371 v.AddArg(x) 4372 return true 4373 } 4374 // match: (MOVHUreg x:(MOVHUreg _)) 4375 // cond: 4376 // result: (MOVDreg x) 4377 for { 4378 x := v.Args[0] 4379 if x.Op != OpARM64MOVHUreg { 4380 break 4381 } 4382 v.reset(OpARM64MOVDreg) 4383 v.AddArg(x) 4384 return true 4385 } 4386 // match: (MOVHUreg (MOVDconst [c])) 4387 // cond: 4388 // result: (MOVDconst [int64(uint16(c))]) 4389 for { 4390 v_0 := v.Args[0] 4391 if 
v_0.Op != OpARM64MOVDconst { 4392 break 4393 } 4394 c := v_0.AuxInt 4395 v.reset(OpARM64MOVDconst) 4396 v.AuxInt = int64(uint16(c)) 4397 return true 4398 } 4399 return false 4400 } 4401 func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool { 4402 // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem) 4403 // cond: fitsARM64Offset(off1+off2, 2, sym) 4404 // result: (MOVHload [off1+off2] {sym} ptr mem) 4405 for { 4406 off1 := v.AuxInt 4407 sym := v.Aux 4408 v_0 := v.Args[0] 4409 if v_0.Op != OpARM64ADDconst { 4410 break 4411 } 4412 off2 := v_0.AuxInt 4413 ptr := v_0.Args[0] 4414 mem := v.Args[1] 4415 if !(fitsARM64Offset(off1+off2, 2, sym)) { 4416 break 4417 } 4418 v.reset(OpARM64MOVHload) 4419 v.AuxInt = off1 + off2 4420 v.Aux = sym 4421 v.AddArg(ptr) 4422 v.AddArg(mem) 4423 return true 4424 } 4425 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4426 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2)) 4427 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4428 for { 4429 off1 := v.AuxInt 4430 sym1 := v.Aux 4431 v_0 := v.Args[0] 4432 if v_0.Op != OpARM64MOVDaddr { 4433 break 4434 } 4435 off2 := v_0.AuxInt 4436 sym2 := v_0.Aux 4437 ptr := v_0.Args[0] 4438 mem := v.Args[1] 4439 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2))) { 4440 break 4441 } 4442 v.reset(OpARM64MOVHload) 4443 v.AuxInt = off1 + off2 4444 v.Aux = mergeSym(sym1, sym2) 4445 v.AddArg(ptr) 4446 v.AddArg(mem) 4447 return true 4448 } 4449 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4450 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4451 // result: (MOVDconst [0]) 4452 for { 4453 off := v.AuxInt 4454 sym := v.Aux 4455 ptr := v.Args[0] 4456 v_1 := v.Args[1] 4457 if v_1.Op != OpARM64MOVHstorezero { 4458 break 4459 } 4460 off2 := v_1.AuxInt 4461 sym2 := v_1.Aux 4462 ptr2 := v_1.Args[0] 4463 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4464 
break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHreg_0 applies the MOVHreg (sign-extend
// 16-bit) rewrite rules: extensions of already-extended or narrow loads
// are redundant and become plain register moves; a constant operand is
// folded with int16 truncation. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstore_0 folds offsets/symbols into 2-byte
// stores and drops redundant extensions of the stored value.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
	// match: (MOVHstore
[off1] {sym} (ADDconst [off2] ptr) val mem) 4562 // cond: fitsARM64Offset(off1+off2, 2, sym) 4563 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4564 for { 4565 off1 := v.AuxInt 4566 sym := v.Aux 4567 v_0 := v.Args[0] 4568 if v_0.Op != OpARM64ADDconst { 4569 break 4570 } 4571 off2 := v_0.AuxInt 4572 ptr := v_0.Args[0] 4573 val := v.Args[1] 4574 mem := v.Args[2] 4575 if !(fitsARM64Offset(off1+off2, 2, sym)) { 4576 break 4577 } 4578 v.reset(OpARM64MOVHstore) 4579 v.AuxInt = off1 + off2 4580 v.Aux = sym 4581 v.AddArg(ptr) 4582 v.AddArg(val) 4583 v.AddArg(mem) 4584 return true 4585 } 4586 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4587 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2)) 4588 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4589 for { 4590 off1 := v.AuxInt 4591 sym1 := v.Aux 4592 v_0 := v.Args[0] 4593 if v_0.Op != OpARM64MOVDaddr { 4594 break 4595 } 4596 off2 := v_0.AuxInt 4597 sym2 := v_0.Aux 4598 ptr := v_0.Args[0] 4599 val := v.Args[1] 4600 mem := v.Args[2] 4601 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2))) { 4602 break 4603 } 4604 v.reset(OpARM64MOVHstore) 4605 v.AuxInt = off1 + off2 4606 v.Aux = mergeSym(sym1, sym2) 4607 v.AddArg(ptr) 4608 v.AddArg(val) 4609 v.AddArg(mem) 4610 return true 4611 } 4612 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 4613 // cond: 4614 // result: (MOVHstorezero [off] {sym} ptr mem) 4615 for { 4616 off := v.AuxInt 4617 sym := v.Aux 4618 ptr := v.Args[0] 4619 v_1 := v.Args[1] 4620 if v_1.Op != OpARM64MOVDconst { 4621 break 4622 } 4623 if v_1.AuxInt != 0 { 4624 break 4625 } 4626 mem := v.Args[2] 4627 v.reset(OpARM64MOVHstorezero) 4628 v.AuxInt = off 4629 v.Aux = sym 4630 v.AddArg(ptr) 4631 v.AddArg(mem) 4632 return true 4633 } 4634 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4635 // cond: 4636 // result: (MOVHstore [off] {sym} ptr x mem) 4637 for { 4638 off := v.AuxInt 4639 
sym := v.Aux 4640 ptr := v.Args[0] 4641 v_1 := v.Args[1] 4642 if v_1.Op != OpARM64MOVHreg { 4643 break 4644 } 4645 x := v_1.Args[0] 4646 mem := v.Args[2] 4647 v.reset(OpARM64MOVHstore) 4648 v.AuxInt = off 4649 v.Aux = sym 4650 v.AddArg(ptr) 4651 v.AddArg(x) 4652 v.AddArg(mem) 4653 return true 4654 } 4655 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4656 // cond: 4657 // result: (MOVHstore [off] {sym} ptr x mem) 4658 for { 4659 off := v.AuxInt 4660 sym := v.Aux 4661 ptr := v.Args[0] 4662 v_1 := v.Args[1] 4663 if v_1.Op != OpARM64MOVHUreg { 4664 break 4665 } 4666 x := v_1.Args[0] 4667 mem := v.Args[2] 4668 v.reset(OpARM64MOVHstore) 4669 v.AuxInt = off 4670 v.Aux = sym 4671 v.AddArg(ptr) 4672 v.AddArg(x) 4673 v.AddArg(mem) 4674 return true 4675 } 4676 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4677 // cond: 4678 // result: (MOVHstore [off] {sym} ptr x mem) 4679 for { 4680 off := v.AuxInt 4681 sym := v.Aux 4682 ptr := v.Args[0] 4683 v_1 := v.Args[1] 4684 if v_1.Op != OpARM64MOVWreg { 4685 break 4686 } 4687 x := v_1.Args[0] 4688 mem := v.Args[2] 4689 v.reset(OpARM64MOVHstore) 4690 v.AuxInt = off 4691 v.Aux = sym 4692 v.AddArg(ptr) 4693 v.AddArg(x) 4694 v.AddArg(mem) 4695 return true 4696 } 4697 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4698 // cond: 4699 // result: (MOVHstore [off] {sym} ptr x mem) 4700 for { 4701 off := v.AuxInt 4702 sym := v.Aux 4703 ptr := v.Args[0] 4704 v_1 := v.Args[1] 4705 if v_1.Op != OpARM64MOVWUreg { 4706 break 4707 } 4708 x := v_1.Args[0] 4709 mem := v.Args[2] 4710 v.reset(OpARM64MOVHstore) 4711 v.AuxInt = off 4712 v.Aux = sym 4713 v.AddArg(ptr) 4714 v.AddArg(x) 4715 v.AddArg(mem) 4716 return true 4717 } 4718 return false 4719 } 4720 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 4721 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4722 // cond: fitsARM64Offset(off1+off2, 2, sym) 4723 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4724 for { 4725 off1 := v.AuxInt 4726 sym 
:= v.Aux 4727 v_0 := v.Args[0] 4728 if v_0.Op != OpARM64ADDconst { 4729 break 4730 } 4731 off2 := v_0.AuxInt 4732 ptr := v_0.Args[0] 4733 mem := v.Args[1] 4734 if !(fitsARM64Offset(off1+off2, 2, sym)) { 4735 break 4736 } 4737 v.reset(OpARM64MOVHstorezero) 4738 v.AuxInt = off1 + off2 4739 v.Aux = sym 4740 v.AddArg(ptr) 4741 v.AddArg(mem) 4742 return true 4743 } 4744 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4745 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2)) 4746 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4747 for { 4748 off1 := v.AuxInt 4749 sym1 := v.Aux 4750 v_0 := v.Args[0] 4751 if v_0.Op != OpARM64MOVDaddr { 4752 break 4753 } 4754 off2 := v_0.AuxInt 4755 sym2 := v_0.Aux 4756 ptr := v_0.Args[0] 4757 mem := v.Args[1] 4758 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 2, mergeSym(sym1, sym2))) { 4759 break 4760 } 4761 v.reset(OpARM64MOVHstorezero) 4762 v.AuxInt = off1 + off2 4763 v.Aux = mergeSym(sym1, sym2) 4764 v.AddArg(ptr) 4765 v.AddArg(mem) 4766 return true 4767 } 4768 return false 4769 } 4770 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 4771 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 4772 // cond: fitsARM64Offset(off1+off2, 4, sym) 4773 // result: (MOVWUload [off1+off2] {sym} ptr mem) 4774 for { 4775 off1 := v.AuxInt 4776 sym := v.Aux 4777 v_0 := v.Args[0] 4778 if v_0.Op != OpARM64ADDconst { 4779 break 4780 } 4781 off2 := v_0.AuxInt 4782 ptr := v_0.Args[0] 4783 mem := v.Args[1] 4784 if !(fitsARM64Offset(off1+off2, 4, sym)) { 4785 break 4786 } 4787 v.reset(OpARM64MOVWUload) 4788 v.AuxInt = off1 + off2 4789 v.Aux = sym 4790 v.AddArg(ptr) 4791 v.AddArg(mem) 4792 return true 4793 } 4794 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4795 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2)) 4796 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4797 for 
{ 4798 off1 := v.AuxInt 4799 sym1 := v.Aux 4800 v_0 := v.Args[0] 4801 if v_0.Op != OpARM64MOVDaddr { 4802 break 4803 } 4804 off2 := v_0.AuxInt 4805 sym2 := v_0.Aux 4806 ptr := v_0.Args[0] 4807 mem := v.Args[1] 4808 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) { 4809 break 4810 } 4811 v.reset(OpARM64MOVWUload) 4812 v.AuxInt = off1 + off2 4813 v.Aux = mergeSym(sym1, sym2) 4814 v.AddArg(ptr) 4815 v.AddArg(mem) 4816 return true 4817 } 4818 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 4819 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4820 // result: (MOVDconst [0]) 4821 for { 4822 off := v.AuxInt 4823 sym := v.Aux 4824 ptr := v.Args[0] 4825 v_1 := v.Args[1] 4826 if v_1.Op != OpARM64MOVWstorezero { 4827 break 4828 } 4829 off2 := v_1.AuxInt 4830 sym2 := v_1.Aux 4831 ptr2 := v_1.Args[0] 4832 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4833 break 4834 } 4835 v.reset(OpARM64MOVDconst) 4836 v.AuxInt = 0 4837 return true 4838 } 4839 return false 4840 } 4841 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 4842 // match: (MOVWUreg x:(MOVBUload _ _)) 4843 // cond: 4844 // result: (MOVDreg x) 4845 for { 4846 x := v.Args[0] 4847 if x.Op != OpARM64MOVBUload { 4848 break 4849 } 4850 v.reset(OpARM64MOVDreg) 4851 v.AddArg(x) 4852 return true 4853 } 4854 // match: (MOVWUreg x:(MOVHUload _ _)) 4855 // cond: 4856 // result: (MOVDreg x) 4857 for { 4858 x := v.Args[0] 4859 if x.Op != OpARM64MOVHUload { 4860 break 4861 } 4862 v.reset(OpARM64MOVDreg) 4863 v.AddArg(x) 4864 return true 4865 } 4866 // match: (MOVWUreg x:(MOVWUload _ _)) 4867 // cond: 4868 // result: (MOVDreg x) 4869 for { 4870 x := v.Args[0] 4871 if x.Op != OpARM64MOVWUload { 4872 break 4873 } 4874 v.reset(OpARM64MOVDreg) 4875 v.AddArg(x) 4876 return true 4877 } 4878 // match: (MOVWUreg x:(MOVBUreg _)) 4879 // cond: 4880 // result: (MOVDreg x) 4881 for { 4882 x := v.Args[0] 4883 if x.Op != OpARM64MOVBUreg { 4884 break 
4885 } 4886 v.reset(OpARM64MOVDreg) 4887 v.AddArg(x) 4888 return true 4889 } 4890 // match: (MOVWUreg x:(MOVHUreg _)) 4891 // cond: 4892 // result: (MOVDreg x) 4893 for { 4894 x := v.Args[0] 4895 if x.Op != OpARM64MOVHUreg { 4896 break 4897 } 4898 v.reset(OpARM64MOVDreg) 4899 v.AddArg(x) 4900 return true 4901 } 4902 // match: (MOVWUreg x:(MOVWUreg _)) 4903 // cond: 4904 // result: (MOVDreg x) 4905 for { 4906 x := v.Args[0] 4907 if x.Op != OpARM64MOVWUreg { 4908 break 4909 } 4910 v.reset(OpARM64MOVDreg) 4911 v.AddArg(x) 4912 return true 4913 } 4914 // match: (MOVWUreg (MOVDconst [c])) 4915 // cond: 4916 // result: (MOVDconst [int64(uint32(c))]) 4917 for { 4918 v_0 := v.Args[0] 4919 if v_0.Op != OpARM64MOVDconst { 4920 break 4921 } 4922 c := v_0.AuxInt 4923 v.reset(OpARM64MOVDconst) 4924 v.AuxInt = int64(uint32(c)) 4925 return true 4926 } 4927 return false 4928 } 4929 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 4930 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 4931 // cond: fitsARM64Offset(off1+off2, 4, sym) 4932 // result: (MOVWload [off1+off2] {sym} ptr mem) 4933 for { 4934 off1 := v.AuxInt 4935 sym := v.Aux 4936 v_0 := v.Args[0] 4937 if v_0.Op != OpARM64ADDconst { 4938 break 4939 } 4940 off2 := v_0.AuxInt 4941 ptr := v_0.Args[0] 4942 mem := v.Args[1] 4943 if !(fitsARM64Offset(off1+off2, 4, sym)) { 4944 break 4945 } 4946 v.reset(OpARM64MOVWload) 4947 v.AuxInt = off1 + off2 4948 v.Aux = sym 4949 v.AddArg(ptr) 4950 v.AddArg(mem) 4951 return true 4952 } 4953 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4954 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2)) 4955 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4956 for { 4957 off1 := v.AuxInt 4958 sym1 := v.Aux 4959 v_0 := v.Args[0] 4960 if v_0.Op != OpARM64MOVDaddr { 4961 break 4962 } 4963 off2 := v_0.AuxInt 4964 sym2 := v_0.Aux 4965 ptr := v_0.Args[0] 4966 mem := v.Args[1] 4967 if !(canMergeSym(sym1, sym2) && 
fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) { 4968 break 4969 } 4970 v.reset(OpARM64MOVWload) 4971 v.AuxInt = off1 + off2 4972 v.Aux = mergeSym(sym1, sym2) 4973 v.AddArg(ptr) 4974 v.AddArg(mem) 4975 return true 4976 } 4977 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 4978 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4979 // result: (MOVDconst [0]) 4980 for { 4981 off := v.AuxInt 4982 sym := v.Aux 4983 ptr := v.Args[0] 4984 v_1 := v.Args[1] 4985 if v_1.Op != OpARM64MOVWstorezero { 4986 break 4987 } 4988 off2 := v_1.AuxInt 4989 sym2 := v_1.Aux 4990 ptr2 := v_1.Args[0] 4991 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4992 break 4993 } 4994 v.reset(OpARM64MOVDconst) 4995 v.AuxInt = 0 4996 return true 4997 } 4998 return false 4999 } 5000 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool { 5001 // match: (MOVWreg x:(MOVBload _ _)) 5002 // cond: 5003 // result: (MOVDreg x) 5004 for { 5005 x := v.Args[0] 5006 if x.Op != OpARM64MOVBload { 5007 break 5008 } 5009 v.reset(OpARM64MOVDreg) 5010 v.AddArg(x) 5011 return true 5012 } 5013 // match: (MOVWreg x:(MOVBUload _ _)) 5014 // cond: 5015 // result: (MOVDreg x) 5016 for { 5017 x := v.Args[0] 5018 if x.Op != OpARM64MOVBUload { 5019 break 5020 } 5021 v.reset(OpARM64MOVDreg) 5022 v.AddArg(x) 5023 return true 5024 } 5025 // match: (MOVWreg x:(MOVHload _ _)) 5026 // cond: 5027 // result: (MOVDreg x) 5028 for { 5029 x := v.Args[0] 5030 if x.Op != OpARM64MOVHload { 5031 break 5032 } 5033 v.reset(OpARM64MOVDreg) 5034 v.AddArg(x) 5035 return true 5036 } 5037 // match: (MOVWreg x:(MOVHUload _ _)) 5038 // cond: 5039 // result: (MOVDreg x) 5040 for { 5041 x := v.Args[0] 5042 if x.Op != OpARM64MOVHUload { 5043 break 5044 } 5045 v.reset(OpARM64MOVDreg) 5046 v.AddArg(x) 5047 return true 5048 } 5049 // match: (MOVWreg x:(MOVWload _ _)) 5050 // cond: 5051 // result: (MOVDreg x) 5052 for { 5053 x := v.Args[0] 5054 if x.Op != OpARM64MOVWload { 5055 break 5056 } 5057 
v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// NOTE(review): this rule is a byte-for-byte duplicate of the previous
	// one and is unreachable (the identical match above always fires first).
	// The fix belongs in gen/ARM64.rules, not in this generated file —
	// presumably the intended rule was (MOVWreg x:(MOVHUreg _)); confirm
	// against the rule source before regenerating.
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWreg_10 is the overflow continuation of the
// MOVWreg rules: it folds a constant operand with int32 sign truncation.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstore_0 folds offsets/symbols into 4-byte
// stores, turns stores of zero into MOVWstorezero, and drops redundant
// extensions of the stored value. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: fitsARM64Offset(off1+off2, 4, sym)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
break 5149 } 5150 off2 := v_0.AuxInt 5151 ptr := v_0.Args[0] 5152 val := v.Args[1] 5153 mem := v.Args[2] 5154 if !(fitsARM64Offset(off1+off2, 4, sym)) { 5155 break 5156 } 5157 v.reset(OpARM64MOVWstore) 5158 v.AuxInt = off1 + off2 5159 v.Aux = sym 5160 v.AddArg(ptr) 5161 v.AddArg(val) 5162 v.AddArg(mem) 5163 return true 5164 } 5165 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5166 // cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2)) 5167 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5168 for { 5169 off1 := v.AuxInt 5170 sym1 := v.Aux 5171 v_0 := v.Args[0] 5172 if v_0.Op != OpARM64MOVDaddr { 5173 break 5174 } 5175 off2 := v_0.AuxInt 5176 sym2 := v_0.Aux 5177 ptr := v_0.Args[0] 5178 val := v.Args[1] 5179 mem := v.Args[2] 5180 if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) { 5181 break 5182 } 5183 v.reset(OpARM64MOVWstore) 5184 v.AuxInt = off1 + off2 5185 v.Aux = mergeSym(sym1, sym2) 5186 v.AddArg(ptr) 5187 v.AddArg(val) 5188 v.AddArg(mem) 5189 return true 5190 } 5191 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 5192 // cond: 5193 // result: (MOVWstorezero [off] {sym} ptr mem) 5194 for { 5195 off := v.AuxInt 5196 sym := v.Aux 5197 ptr := v.Args[0] 5198 v_1 := v.Args[1] 5199 if v_1.Op != OpARM64MOVDconst { 5200 break 5201 } 5202 if v_1.AuxInt != 0 { 5203 break 5204 } 5205 mem := v.Args[2] 5206 v.reset(OpARM64MOVWstorezero) 5207 v.AuxInt = off 5208 v.Aux = sym 5209 v.AddArg(ptr) 5210 v.AddArg(mem) 5211 return true 5212 } 5213 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5214 // cond: 5215 // result: (MOVWstore [off] {sym} ptr x mem) 5216 for { 5217 off := v.AuxInt 5218 sym := v.Aux 5219 ptr := v.Args[0] 5220 v_1 := v.Args[1] 5221 if v_1.Op != OpARM64MOVWreg { 5222 break 5223 } 5224 x := v_1.Args[0] 5225 mem := v.Args[2] 5226 v.reset(OpARM64MOVWstore) 5227 v.AuxInt = off 5228 v.Aux = sym 5229 v.AddArg(ptr) 5230 v.AddArg(x) 5231 
v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstorezero_0 folds constant offsets and
// symbol addresses into 4-byte zero stores. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: fitsARM64Offset(off1+off2, 4, sym)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(fitsARM64Offset(off1+off2, 4, sym)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && fitsARM64Offset(off1+off2, 4, mergeSym(sym1, sym2))) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MUL_0 applies strength-reduction rules for MUL
// with constant operands (negation, zero, identity, power-of-two shifts).
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MUL_0(v *Value) bool {
	//
match: (MUL x (MOVDconst [-1])) 5309 // cond: 5310 // result: (NEG x) 5311 for { 5312 x := v.Args[0] 5313 v_1 := v.Args[1] 5314 if v_1.Op != OpARM64MOVDconst { 5315 break 5316 } 5317 if v_1.AuxInt != -1 { 5318 break 5319 } 5320 v.reset(OpARM64NEG) 5321 v.AddArg(x) 5322 return true 5323 } 5324 // match: (MUL (MOVDconst [-1]) x) 5325 // cond: 5326 // result: (NEG x) 5327 for { 5328 v_0 := v.Args[0] 5329 if v_0.Op != OpARM64MOVDconst { 5330 break 5331 } 5332 if v_0.AuxInt != -1 { 5333 break 5334 } 5335 x := v.Args[1] 5336 v.reset(OpARM64NEG) 5337 v.AddArg(x) 5338 return true 5339 } 5340 // match: (MUL _ (MOVDconst [0])) 5341 // cond: 5342 // result: (MOVDconst [0]) 5343 for { 5344 v_1 := v.Args[1] 5345 if v_1.Op != OpARM64MOVDconst { 5346 break 5347 } 5348 if v_1.AuxInt != 0 { 5349 break 5350 } 5351 v.reset(OpARM64MOVDconst) 5352 v.AuxInt = 0 5353 return true 5354 } 5355 // match: (MUL (MOVDconst [0]) _) 5356 // cond: 5357 // result: (MOVDconst [0]) 5358 for { 5359 v_0 := v.Args[0] 5360 if v_0.Op != OpARM64MOVDconst { 5361 break 5362 } 5363 if v_0.AuxInt != 0 { 5364 break 5365 } 5366 v.reset(OpARM64MOVDconst) 5367 v.AuxInt = 0 5368 return true 5369 } 5370 // match: (MUL x (MOVDconst [1])) 5371 // cond: 5372 // result: x 5373 for { 5374 x := v.Args[0] 5375 v_1 := v.Args[1] 5376 if v_1.Op != OpARM64MOVDconst { 5377 break 5378 } 5379 if v_1.AuxInt != 1 { 5380 break 5381 } 5382 v.reset(OpCopy) 5383 v.Type = x.Type 5384 v.AddArg(x) 5385 return true 5386 } 5387 // match: (MUL (MOVDconst [1]) x) 5388 // cond: 5389 // result: x 5390 for { 5391 v_0 := v.Args[0] 5392 if v_0.Op != OpARM64MOVDconst { 5393 break 5394 } 5395 if v_0.AuxInt != 1 { 5396 break 5397 } 5398 x := v.Args[1] 5399 v.reset(OpCopy) 5400 v.Type = x.Type 5401 v.AddArg(x) 5402 return true 5403 } 5404 // match: (MUL x (MOVDconst [c])) 5405 // cond: isPowerOfTwo(c) 5406 // result: (SLLconst [log2(c)] x) 5407 for { 5408 x := v.Args[0] 5409 v_1 := v.Args[1] 5410 if v_1.Op != OpARM64MOVDconst { 5411 break 5412 } 5413 
c := v_1.AuxInt 5414 if !(isPowerOfTwo(c)) { 5415 break 5416 } 5417 v.reset(OpARM64SLLconst) 5418 v.AuxInt = log2(c) 5419 v.AddArg(x) 5420 return true 5421 } 5422 // match: (MUL (MOVDconst [c]) x) 5423 // cond: isPowerOfTwo(c) 5424 // result: (SLLconst [log2(c)] x) 5425 for { 5426 v_0 := v.Args[0] 5427 if v_0.Op != OpARM64MOVDconst { 5428 break 5429 } 5430 c := v_0.AuxInt 5431 x := v.Args[1] 5432 if !(isPowerOfTwo(c)) { 5433 break 5434 } 5435 v.reset(OpARM64SLLconst) 5436 v.AuxInt = log2(c) 5437 v.AddArg(x) 5438 return true 5439 } 5440 // match: (MUL x (MOVDconst [c])) 5441 // cond: isPowerOfTwo(c-1) && c >= 3 5442 // result: (ADDshiftLL x x [log2(c-1)]) 5443 for { 5444 x := v.Args[0] 5445 v_1 := v.Args[1] 5446 if v_1.Op != OpARM64MOVDconst { 5447 break 5448 } 5449 c := v_1.AuxInt 5450 if !(isPowerOfTwo(c-1) && c >= 3) { 5451 break 5452 } 5453 v.reset(OpARM64ADDshiftLL) 5454 v.AuxInt = log2(c - 1) 5455 v.AddArg(x) 5456 v.AddArg(x) 5457 return true 5458 } 5459 // match: (MUL (MOVDconst [c]) x) 5460 // cond: isPowerOfTwo(c-1) && c >= 3 5461 // result: (ADDshiftLL x x [log2(c-1)]) 5462 for { 5463 v_0 := v.Args[0] 5464 if v_0.Op != OpARM64MOVDconst { 5465 break 5466 } 5467 c := v_0.AuxInt 5468 x := v.Args[1] 5469 if !(isPowerOfTwo(c-1) && c >= 3) { 5470 break 5471 } 5472 v.reset(OpARM64ADDshiftLL) 5473 v.AuxInt = log2(c - 1) 5474 v.AddArg(x) 5475 v.AddArg(x) 5476 return true 5477 } 5478 return false 5479 } 5480 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 5481 b := v.Block 5482 _ = b 5483 // match: (MUL x (MOVDconst [c])) 5484 // cond: isPowerOfTwo(c+1) && c >= 7 5485 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5486 for { 5487 x := v.Args[0] 5488 v_1 := v.Args[1] 5489 if v_1.Op != OpARM64MOVDconst { 5490 break 5491 } 5492 c := v_1.AuxInt 5493 if !(isPowerOfTwo(c+1) && c >= 7) { 5494 break 5495 } 5496 v.reset(OpARM64ADDshiftLL) 5497 v.AuxInt = log2(c + 1) 5498 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5499 v0.AddArg(x) 5500 v.AddArg(v0) 5501 
v.AddArg(x) 5502 return true 5503 } 5504 // match: (MUL (MOVDconst [c]) x) 5505 // cond: isPowerOfTwo(c+1) && c >= 7 5506 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5507 for { 5508 v_0 := v.Args[0] 5509 if v_0.Op != OpARM64MOVDconst { 5510 break 5511 } 5512 c := v_0.AuxInt 5513 x := v.Args[1] 5514 if !(isPowerOfTwo(c+1) && c >= 7) { 5515 break 5516 } 5517 v.reset(OpARM64ADDshiftLL) 5518 v.AuxInt = log2(c + 1) 5519 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5520 v0.AddArg(x) 5521 v.AddArg(v0) 5522 v.AddArg(x) 5523 return true 5524 } 5525 // match: (MUL x (MOVDconst [c])) 5526 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5527 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5528 for { 5529 x := v.Args[0] 5530 v_1 := v.Args[1] 5531 if v_1.Op != OpARM64MOVDconst { 5532 break 5533 } 5534 c := v_1.AuxInt 5535 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5536 break 5537 } 5538 v.reset(OpARM64SLLconst) 5539 v.AuxInt = log2(c / 3) 5540 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5541 v0.AuxInt = 1 5542 v0.AddArg(x) 5543 v0.AddArg(x) 5544 v.AddArg(v0) 5545 return true 5546 } 5547 // match: (MUL (MOVDconst [c]) x) 5548 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5549 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5550 for { 5551 v_0 := v.Args[0] 5552 if v_0.Op != OpARM64MOVDconst { 5553 break 5554 } 5555 c := v_0.AuxInt 5556 x := v.Args[1] 5557 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5558 break 5559 } 5560 v.reset(OpARM64SLLconst) 5561 v.AuxInt = log2(c / 3) 5562 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5563 v0.AuxInt = 1 5564 v0.AddArg(x) 5565 v0.AddArg(x) 5566 v.AddArg(v0) 5567 return true 5568 } 5569 // match: (MUL x (MOVDconst [c])) 5570 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5571 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5572 for { 5573 x := v.Args[0] 5574 v_1 := v.Args[1] 5575 if v_1.Op != OpARM64MOVDconst { 5576 break 5577 } 5578 c := v_1.AuxInt 5579 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5580 break 5581 
} 5582 v.reset(OpARM64SLLconst) 5583 v.AuxInt = log2(c / 5) 5584 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5585 v0.AuxInt = 2 5586 v0.AddArg(x) 5587 v0.AddArg(x) 5588 v.AddArg(v0) 5589 return true 5590 } 5591 // match: (MUL (MOVDconst [c]) x) 5592 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5593 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5594 for { 5595 v_0 := v.Args[0] 5596 if v_0.Op != OpARM64MOVDconst { 5597 break 5598 } 5599 c := v_0.AuxInt 5600 x := v.Args[1] 5601 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5602 break 5603 } 5604 v.reset(OpARM64SLLconst) 5605 v.AuxInt = log2(c / 5) 5606 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5607 v0.AuxInt = 2 5608 v0.AddArg(x) 5609 v0.AddArg(x) 5610 v.AddArg(v0) 5611 return true 5612 } 5613 // match: (MUL x (MOVDconst [c])) 5614 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5615 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5616 for { 5617 x := v.Args[0] 5618 v_1 := v.Args[1] 5619 if v_1.Op != OpARM64MOVDconst { 5620 break 5621 } 5622 c := v_1.AuxInt 5623 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5624 break 5625 } 5626 v.reset(OpARM64SLLconst) 5627 v.AuxInt = log2(c / 7) 5628 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5629 v0.AuxInt = 3 5630 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5631 v1.AddArg(x) 5632 v0.AddArg(v1) 5633 v0.AddArg(x) 5634 v.AddArg(v0) 5635 return true 5636 } 5637 // match: (MUL (MOVDconst [c]) x) 5638 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5639 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5640 for { 5641 v_0 := v.Args[0] 5642 if v_0.Op != OpARM64MOVDconst { 5643 break 5644 } 5645 c := v_0.AuxInt 5646 x := v.Args[1] 5647 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5648 break 5649 } 5650 v.reset(OpARM64SLLconst) 5651 v.AuxInt = log2(c / 7) 5652 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5653 v0.AuxInt = 3 5654 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5655 v1.AddArg(x) 5656 v0.AddArg(v1) 5657 
// NOTE(review): the statements below complete a MUL rewrite function whose
// beginning lies outside this chunk; they are reproduced unchanged.
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MUL_20 folds 64-bit multiplication of two
// constants into a single MOVDconst. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	// match: (MUL (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MULW_0 applies strength-reduction rules for
// 32-bit multiply by small constants (-1, 0, 1, powers of two, 2^n±1).
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MULW_0(v *Value) bool {
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MULW_10 continues MULW strength reduction:
// multiplies by 2^n-1 (via shifted-add of a negation) and by constants of
// the form k*2^n for k in {3,5,7,9}. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MULW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MULW_20 folds 32-bit multiplication of two
// constants into a MOVDconst (truncating each operand to int32 first).
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MULW_20(v *Value) bool {
	// match: (MULW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	// match: (MULW (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVN_0 folds bitwise-NOT of a constant.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MVN_0(v *Value) bool {
	// match: (MVN (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [^c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^c
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEG_0 folds negation of a constant.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64NEG_0(v *Value) bool {
	// match: (NEG (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NotEqual_0 evaluates NotEqual against known
// flag constants and pushes it through InvertFlags. Returns true if v
// was rewritten.
func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool {
	// match: (NotEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (NotEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (InvertFlags x))
	// cond:
	// result: (NotEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64OR_0 canonicalizes OR with a constant or a
// shifted operand into ORconst/ORshift forms, simplifies (OR x x), and
// recognizes a little-endian 4-byte load pattern, merging it into a
// single MOVWUload. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64OR_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (OR x (MOVDconst [c]))
	// cond:
	// result: (ORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR (MOVDconst [c]) x)
	// cond:
	// result: (ORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (OR x (SLLconst [c] y))
	// cond:
	// result: (ORshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SLLconst [c] y) x)
	// cond:
	// result: (ORshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR x (SRLconst [c] y))
	// cond:
	// result: (ORshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SRLconst [c] y) x)
	// cond:
	// result: (ORshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR x (SRAconst [c] y))
	// cond:
	// result: (ORshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SRAconst [c] y) x)
	// cond:
	// result: (ORshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)
	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 8 {
			break
		}
		o1 := o0.Args[0]
		if o1.Op != OpARM64ORshiftLL {
			break
		}
		if o1.AuxInt != 16 {
			break
		}
		s0 := o1.Args[0]
		if s0.Op != OpARM64SLLconst {
			break
		}
		if s0.AuxInt != 24 {
			break
		}
		y0 := s0.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUload {
			break
		}
		i3 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o1.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i2 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := o0.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i1 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		y3 := v.Args[1]
		if y3.Op != OpARM64MOVDnop {
			break
		}
		x3 := y3.Args[0]
		if x3.Op != OpARM64MOVBUload {
			break
		}
		i0 := x3.AuxInt
		if x3.Aux != s {
			break
		}
		if p != x3.Args[0] {
			break
		}
		if mem != x3.Args[1] {
			break
		}
		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
			break
		}
		b = mergePoint(b, x0, x1, x2, x3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64OR_10 handles the argument-swapped variants of
// the OR load-merging patterns. NOTE(review): this function continues
// beyond this chunk; the opening statements below are reproduced unchanged.
func rewriteValueARM64_OpARM64OR_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))))
	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)
	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		y3 := v.Args[0]
		if y3.Op !=
OpARM64MOVDnop { 6566 break 6567 } 6568 x3 := y3.Args[0] 6569 if x3.Op != OpARM64MOVBUload { 6570 break 6571 } 6572 i0 := x3.AuxInt 6573 s := x3.Aux 6574 p := x3.Args[0] 6575 mem := x3.Args[1] 6576 o0 := v.Args[1] 6577 if o0.Op != OpARM64ORshiftLL { 6578 break 6579 } 6580 if o0.AuxInt != 8 { 6581 break 6582 } 6583 o1 := o0.Args[0] 6584 if o1.Op != OpARM64ORshiftLL { 6585 break 6586 } 6587 if o1.AuxInt != 16 { 6588 break 6589 } 6590 s0 := o1.Args[0] 6591 if s0.Op != OpARM64SLLconst { 6592 break 6593 } 6594 if s0.AuxInt != 24 { 6595 break 6596 } 6597 y0 := s0.Args[0] 6598 if y0.Op != OpARM64MOVDnop { 6599 break 6600 } 6601 x0 := y0.Args[0] 6602 if x0.Op != OpARM64MOVBUload { 6603 break 6604 } 6605 i3 := x0.AuxInt 6606 if x0.Aux != s { 6607 break 6608 } 6609 if p != x0.Args[0] { 6610 break 6611 } 6612 if mem != x0.Args[1] { 6613 break 6614 } 6615 y1 := o1.Args[1] 6616 if y1.Op != OpARM64MOVDnop { 6617 break 6618 } 6619 x1 := y1.Args[0] 6620 if x1.Op != OpARM64MOVBUload { 6621 break 6622 } 6623 i2 := x1.AuxInt 6624 if x1.Aux != s { 6625 break 6626 } 6627 if p != x1.Args[0] { 6628 break 6629 } 6630 if mem != x1.Args[1] { 6631 break 6632 } 6633 y2 := o0.Args[1] 6634 if y2.Op != OpARM64MOVDnop { 6635 break 6636 } 6637 x2 := y2.Args[0] 6638 if x2.Op != OpARM64MOVBUload { 6639 break 6640 } 6641 i1 := x2.AuxInt 6642 if x2.Aux != s { 6643 break 6644 } 6645 if p != x2.Args[0] { 6646 break 6647 } 6648 if mem != x2.Args[1] { 6649 break 6650 } 6651 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 6652 break 6653 } 6654 b = mergePoint(b, x0, x1, x2, x3) 6655 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 6656 
v.reset(OpCopy) 6657 v.AddArg(v0) 6658 v0.Aux = s 6659 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6660 v1.AuxInt = i0 6661 v1.AddArg(p) 6662 v0.AddArg(v1) 6663 v0.AddArg(mem) 6664 return true 6665 } 6666 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 6667 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 6668 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 6669 for { 6670 t := v.Type 6671 o0 := v.Args[0] 6672 if o0.Op != OpARM64ORshiftLL { 6673 break 6674 } 6675 if o0.AuxInt != 8 { 6676 break 6677 } 6678 o1 := o0.Args[0] 6679 if o1.Op != OpARM64ORshiftLL { 6680 break 6681 } 6682 if o1.AuxInt != 16 { 6683 break 6684 } 6685 o2 := o1.Args[0] 6686 if o2.Op != 
OpARM64ORshiftLL { 6687 break 6688 } 6689 if o2.AuxInt != 24 { 6690 break 6691 } 6692 o3 := o2.Args[0] 6693 if o3.Op != OpARM64ORshiftLL { 6694 break 6695 } 6696 if o3.AuxInt != 32 { 6697 break 6698 } 6699 o4 := o3.Args[0] 6700 if o4.Op != OpARM64ORshiftLL { 6701 break 6702 } 6703 if o4.AuxInt != 40 { 6704 break 6705 } 6706 o5 := o4.Args[0] 6707 if o5.Op != OpARM64ORshiftLL { 6708 break 6709 } 6710 if o5.AuxInt != 48 { 6711 break 6712 } 6713 s0 := o5.Args[0] 6714 if s0.Op != OpARM64SLLconst { 6715 break 6716 } 6717 if s0.AuxInt != 56 { 6718 break 6719 } 6720 y0 := s0.Args[0] 6721 if y0.Op != OpARM64MOVDnop { 6722 break 6723 } 6724 x0 := y0.Args[0] 6725 if x0.Op != OpARM64MOVBUload { 6726 break 6727 } 6728 i7 := x0.AuxInt 6729 s := x0.Aux 6730 p := x0.Args[0] 6731 mem := x0.Args[1] 6732 y1 := o5.Args[1] 6733 if y1.Op != OpARM64MOVDnop { 6734 break 6735 } 6736 x1 := y1.Args[0] 6737 if x1.Op != OpARM64MOVBUload { 6738 break 6739 } 6740 i6 := x1.AuxInt 6741 if x1.Aux != s { 6742 break 6743 } 6744 if p != x1.Args[0] { 6745 break 6746 } 6747 if mem != x1.Args[1] { 6748 break 6749 } 6750 y2 := o4.Args[1] 6751 if y2.Op != OpARM64MOVDnop { 6752 break 6753 } 6754 x2 := y2.Args[0] 6755 if x2.Op != OpARM64MOVBUload { 6756 break 6757 } 6758 i5 := x2.AuxInt 6759 if x2.Aux != s { 6760 break 6761 } 6762 if p != x2.Args[0] { 6763 break 6764 } 6765 if mem != x2.Args[1] { 6766 break 6767 } 6768 y3 := o3.Args[1] 6769 if y3.Op != OpARM64MOVDnop { 6770 break 6771 } 6772 x3 := y3.Args[0] 6773 if x3.Op != OpARM64MOVBUload { 6774 break 6775 } 6776 i4 := x3.AuxInt 6777 if x3.Aux != s { 6778 break 6779 } 6780 if p != x3.Args[0] { 6781 break 6782 } 6783 if mem != x3.Args[1] { 6784 break 6785 } 6786 y4 := o2.Args[1] 6787 if y4.Op != OpARM64MOVDnop { 6788 break 6789 } 6790 x4 := y4.Args[0] 6791 if x4.Op != OpARM64MOVBUload { 6792 break 6793 } 6794 i3 := x4.AuxInt 6795 if x4.Aux != s { 6796 break 6797 } 6798 if p != x4.Args[0] { 6799 break 6800 } 6801 if mem != x4.Args[1] { 6802 break 6803 } 
6804 y5 := o1.Args[1] 6805 if y5.Op != OpARM64MOVDnop { 6806 break 6807 } 6808 x5 := y5.Args[0] 6809 if x5.Op != OpARM64MOVBUload { 6810 break 6811 } 6812 i2 := x5.AuxInt 6813 if x5.Aux != s { 6814 break 6815 } 6816 if p != x5.Args[0] { 6817 break 6818 } 6819 if mem != x5.Args[1] { 6820 break 6821 } 6822 y6 := o0.Args[1] 6823 if y6.Op != OpARM64MOVDnop { 6824 break 6825 } 6826 x6 := y6.Args[0] 6827 if x6.Op != OpARM64MOVBUload { 6828 break 6829 } 6830 i1 := x6.AuxInt 6831 if x6.Aux != s { 6832 break 6833 } 6834 if p != x6.Args[0] { 6835 break 6836 } 6837 if mem != x6.Args[1] { 6838 break 6839 } 6840 y7 := v.Args[1] 6841 if y7.Op != OpARM64MOVDnop { 6842 break 6843 } 6844 x7 := y7.Args[0] 6845 if x7.Op != OpARM64MOVBUload { 6846 break 6847 } 6848 i0 := x7.AuxInt 6849 if x7.Aux != s { 6850 break 6851 } 6852 if p != x7.Args[0] { 6853 break 6854 } 6855 if mem != x7.Args[1] { 6856 break 6857 } 6858 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 6859 break 6860 } 6861 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 6862 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 6863 v.reset(OpCopy) 6864 v.AddArg(v0) 6865 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 6866 v1.Aux = 
s 6867 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6868 v2.AuxInt = i0 6869 v2.AddArg(p) 6870 v1.AddArg(v2) 6871 v1.AddArg(mem) 6872 v0.AddArg(v1) 6873 return true 6874 } 6875 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 6876 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 6877 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 6878 for { 6879 t := v.Type 6880 y7 := v.Args[0] 6881 if y7.Op != OpARM64MOVDnop { 6882 break 6883 } 6884 x7 := y7.Args[0] 6885 if x7.Op != OpARM64MOVBUload { 6886 break 6887 } 6888 i0 := x7.AuxInt 6889 s := x7.Aux 6890 p := x7.Args[0] 6891 mem := x7.Args[1] 6892 o0 := v.Args[1] 6893 if o0.Op != OpARM64ORshiftLL { 6894 break 6895 } 6896 
if o0.AuxInt != 8 { 6897 break 6898 } 6899 o1 := o0.Args[0] 6900 if o1.Op != OpARM64ORshiftLL { 6901 break 6902 } 6903 if o1.AuxInt != 16 { 6904 break 6905 } 6906 o2 := o1.Args[0] 6907 if o2.Op != OpARM64ORshiftLL { 6908 break 6909 } 6910 if o2.AuxInt != 24 { 6911 break 6912 } 6913 o3 := o2.Args[0] 6914 if o3.Op != OpARM64ORshiftLL { 6915 break 6916 } 6917 if o3.AuxInt != 32 { 6918 break 6919 } 6920 o4 := o3.Args[0] 6921 if o4.Op != OpARM64ORshiftLL { 6922 break 6923 } 6924 if o4.AuxInt != 40 { 6925 break 6926 } 6927 o5 := o4.Args[0] 6928 if o5.Op != OpARM64ORshiftLL { 6929 break 6930 } 6931 if o5.AuxInt != 48 { 6932 break 6933 } 6934 s0 := o5.Args[0] 6935 if s0.Op != OpARM64SLLconst { 6936 break 6937 } 6938 if s0.AuxInt != 56 { 6939 break 6940 } 6941 y0 := s0.Args[0] 6942 if y0.Op != OpARM64MOVDnop { 6943 break 6944 } 6945 x0 := y0.Args[0] 6946 if x0.Op != OpARM64MOVBUload { 6947 break 6948 } 6949 i7 := x0.AuxInt 6950 if x0.Aux != s { 6951 break 6952 } 6953 if p != x0.Args[0] { 6954 break 6955 } 6956 if mem != x0.Args[1] { 6957 break 6958 } 6959 y1 := o5.Args[1] 6960 if y1.Op != OpARM64MOVDnop { 6961 break 6962 } 6963 x1 := y1.Args[0] 6964 if x1.Op != OpARM64MOVBUload { 6965 break 6966 } 6967 i6 := x1.AuxInt 6968 if x1.Aux != s { 6969 break 6970 } 6971 if p != x1.Args[0] { 6972 break 6973 } 6974 if mem != x1.Args[1] { 6975 break 6976 } 6977 y2 := o4.Args[1] 6978 if y2.Op != OpARM64MOVDnop { 6979 break 6980 } 6981 x2 := y2.Args[0] 6982 if x2.Op != OpARM64MOVBUload { 6983 break 6984 } 6985 i5 := x2.AuxInt 6986 if x2.Aux != s { 6987 break 6988 } 6989 if p != x2.Args[0] { 6990 break 6991 } 6992 if mem != x2.Args[1] { 6993 break 6994 } 6995 y3 := o3.Args[1] 6996 if y3.Op != OpARM64MOVDnop { 6997 break 6998 } 6999 x3 := y3.Args[0] 7000 if x3.Op != OpARM64MOVBUload { 7001 break 7002 } 7003 i4 := x3.AuxInt 7004 if x3.Aux != s { 7005 break 7006 } 7007 if p != x3.Args[0] { 7008 break 7009 } 7010 if mem != x3.Args[1] { 7011 break 7012 } 7013 y4 := o2.Args[1] 7014 if y4.Op != 
OpARM64MOVDnop { 7015 break 7016 } 7017 x4 := y4.Args[0] 7018 if x4.Op != OpARM64MOVBUload { 7019 break 7020 } 7021 i3 := x4.AuxInt 7022 if x4.Aux != s { 7023 break 7024 } 7025 if p != x4.Args[0] { 7026 break 7027 } 7028 if mem != x4.Args[1] { 7029 break 7030 } 7031 y5 := o1.Args[1] 7032 if y5.Op != OpARM64MOVDnop { 7033 break 7034 } 7035 x5 := y5.Args[0] 7036 if x5.Op != OpARM64MOVBUload { 7037 break 7038 } 7039 i2 := x5.AuxInt 7040 if x5.Aux != s { 7041 break 7042 } 7043 if p != x5.Args[0] { 7044 break 7045 } 7046 if mem != x5.Args[1] { 7047 break 7048 } 7049 y6 := o0.Args[1] 7050 if y6.Op != OpARM64MOVDnop { 7051 break 7052 } 7053 x6 := y6.Args[0] 7054 if x6.Op != OpARM64MOVBUload { 7055 break 7056 } 7057 i1 := x6.AuxInt 7058 if x6.Aux != s { 7059 break 7060 } 7061 if p != x6.Args[0] { 7062 break 7063 } 7064 if mem != x6.Args[1] { 7065 break 7066 } 7067 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7068 break 7069 } 7070 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7071 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7072 v.reset(OpCopy) 7073 v.AddArg(v0) 7074 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7075 v1.Aux = s 7076 v2 := b.NewValue0(v.Pos, 
OpOffPtr, p.Type) 7077 v2.AuxInt = i0 7078 v2.AddArg(p) 7079 v1.AddArg(v2) 7080 v1.AddArg(mem) 7081 v0.AddArg(v1) 7082 return true 7083 } 7084 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 7085 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7086 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7087 for { 7088 t := v.Type 7089 o0 := v.Args[0] 7090 if o0.Op != OpARM64ORshiftLL { 7091 break 7092 } 7093 if o0.AuxInt != 8 { 7094 break 7095 } 7096 o1 := o0.Args[0] 7097 if o1.Op != OpARM64ORshiftLL { 7098 break 7099 } 7100 if o1.AuxInt != 16 { 7101 break 7102 } 7103 s0 := o1.Args[0] 7104 if s0.Op != OpARM64SLLconst { 7105 break 7106 } 7107 if s0.AuxInt != 24 { 7108 break 7109 } 7110 y0 := s0.Args[0] 7111 if y0.Op != OpARM64MOVDnop { 7112 break 7113 } 7114 x0 := y0.Args[0] 7115 if x0.Op != OpARM64MOVBUload { 7116 break 7117 } 7118 i0 := x0.AuxInt 7119 s := x0.Aux 7120 p := x0.Args[0] 7121 mem := x0.Args[1] 7122 y1 := o1.Args[1] 7123 if y1.Op != OpARM64MOVDnop { 7124 break 7125 } 7126 x1 := y1.Args[0] 7127 if x1.Op != OpARM64MOVBUload { 7128 break 7129 } 7130 i1 := x1.AuxInt 7131 if x1.Aux != s { 7132 break 7133 } 7134 if p != x1.Args[0] { 7135 break 7136 } 7137 if mem != x1.Args[1] { 7138 break 7139 } 7140 y2 := o0.Args[1] 7141 if y2.Op != OpARM64MOVDnop { 7142 break 7143 } 7144 x2 := y2.Args[0] 7145 if x2.Op != OpARM64MOVBUload { 7146 break 7147 } 7148 
i2 := x2.AuxInt 7149 if x2.Aux != s { 7150 break 7151 } 7152 if p != x2.Args[0] { 7153 break 7154 } 7155 if mem != x2.Args[1] { 7156 break 7157 } 7158 y3 := v.Args[1] 7159 if y3.Op != OpARM64MOVDnop { 7160 break 7161 } 7162 x3 := y3.Args[0] 7163 if x3.Op != OpARM64MOVBUload { 7164 break 7165 } 7166 i3 := x3.AuxInt 7167 if x3.Aux != s { 7168 break 7169 } 7170 if p != x3.Args[0] { 7171 break 7172 } 7173 if mem != x3.Args[1] { 7174 break 7175 } 7176 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7177 break 7178 } 7179 b = mergePoint(b, x0, x1, x2, x3) 7180 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7181 v.reset(OpCopy) 7182 v.AddArg(v0) 7183 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7184 v1.Aux = s 7185 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7186 v2.AuxInt = i0 7187 v2.AddArg(p) 7188 v1.AddArg(v2) 7189 v1.AddArg(mem) 7190 v0.AddArg(v1) 7191 return true 7192 } 7193 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 7194 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7195 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> 
(MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7196 for { 7197 t := v.Type 7198 y3 := v.Args[0] 7199 if y3.Op != OpARM64MOVDnop { 7200 break 7201 } 7202 x3 := y3.Args[0] 7203 if x3.Op != OpARM64MOVBUload { 7204 break 7205 } 7206 i3 := x3.AuxInt 7207 s := x3.Aux 7208 p := x3.Args[0] 7209 mem := x3.Args[1] 7210 o0 := v.Args[1] 7211 if o0.Op != OpARM64ORshiftLL { 7212 break 7213 } 7214 if o0.AuxInt != 8 { 7215 break 7216 } 7217 o1 := o0.Args[0] 7218 if o1.Op != OpARM64ORshiftLL { 7219 break 7220 } 7221 if o1.AuxInt != 16 { 7222 break 7223 } 7224 s0 := o1.Args[0] 7225 if s0.Op != OpARM64SLLconst { 7226 break 7227 } 7228 if s0.AuxInt != 24 { 7229 break 7230 } 7231 y0 := s0.Args[0] 7232 if y0.Op != OpARM64MOVDnop { 7233 break 7234 } 7235 x0 := y0.Args[0] 7236 if x0.Op != OpARM64MOVBUload { 7237 break 7238 } 7239 i0 := x0.AuxInt 7240 if x0.Aux != s { 7241 break 7242 } 7243 if p != x0.Args[0] { 7244 break 7245 } 7246 if mem != x0.Args[1] { 7247 break 7248 } 7249 y1 := o1.Args[1] 7250 if y1.Op != OpARM64MOVDnop { 7251 break 7252 } 7253 x1 := y1.Args[0] 7254 if x1.Op != OpARM64MOVBUload { 7255 break 7256 } 7257 i1 := x1.AuxInt 7258 if x1.Aux != s { 7259 break 7260 } 7261 if p != x1.Args[0] { 7262 break 7263 } 7264 if mem != x1.Args[1] { 7265 break 7266 } 7267 y2 := o0.Args[1] 7268 if y2.Op != OpARM64MOVDnop { 7269 break 7270 } 7271 x2 := y2.Args[0] 7272 if x2.Op != OpARM64MOVBUload { 7273 break 7274 } 7275 i2 := x2.AuxInt 7276 if x2.Aux != s { 7277 break 7278 } 7279 if p != x2.Args[0] { 7280 break 7281 } 7282 if mem != x2.Args[1] { 7283 break 7284 } 7285 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) 
{ 7286 break 7287 } 7288 b = mergePoint(b, x0, x1, x2, x3) 7289 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7290 v.reset(OpCopy) 7291 v.AddArg(v0) 7292 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7293 v1.Aux = s 7294 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7295 v2.AuxInt = i0 7296 v2.AddArg(p) 7297 v1.AddArg(v2) 7298 v1.AddArg(mem) 7299 v0.AddArg(v1) 7300 return true 7301 } 7302 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 7303 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7304 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7305 for { 7306 t := v.Type 7307 o0 := v.Args[0] 7308 if o0.Op != OpARM64ORshiftLL { 7309 break 7310 } 7311 if o0.AuxInt != 8 { 
7312 break 7313 } 7314 o1 := o0.Args[0] 7315 if o1.Op != OpARM64ORshiftLL { 7316 break 7317 } 7318 if o1.AuxInt != 16 { 7319 break 7320 } 7321 o2 := o1.Args[0] 7322 if o2.Op != OpARM64ORshiftLL { 7323 break 7324 } 7325 if o2.AuxInt != 24 { 7326 break 7327 } 7328 o3 := o2.Args[0] 7329 if o3.Op != OpARM64ORshiftLL { 7330 break 7331 } 7332 if o3.AuxInt != 32 { 7333 break 7334 } 7335 o4 := o3.Args[0] 7336 if o4.Op != OpARM64ORshiftLL { 7337 break 7338 } 7339 if o4.AuxInt != 40 { 7340 break 7341 } 7342 o5 := o4.Args[0] 7343 if o5.Op != OpARM64ORshiftLL { 7344 break 7345 } 7346 if o5.AuxInt != 48 { 7347 break 7348 } 7349 s0 := o5.Args[0] 7350 if s0.Op != OpARM64SLLconst { 7351 break 7352 } 7353 if s0.AuxInt != 56 { 7354 break 7355 } 7356 y0 := s0.Args[0] 7357 if y0.Op != OpARM64MOVDnop { 7358 break 7359 } 7360 x0 := y0.Args[0] 7361 if x0.Op != OpARM64MOVBUload { 7362 break 7363 } 7364 i0 := x0.AuxInt 7365 s := x0.Aux 7366 p := x0.Args[0] 7367 mem := x0.Args[1] 7368 y1 := o5.Args[1] 7369 if y1.Op != OpARM64MOVDnop { 7370 break 7371 } 7372 x1 := y1.Args[0] 7373 if x1.Op != OpARM64MOVBUload { 7374 break 7375 } 7376 i1 := x1.AuxInt 7377 if x1.Aux != s { 7378 break 7379 } 7380 if p != x1.Args[0] { 7381 break 7382 } 7383 if mem != x1.Args[1] { 7384 break 7385 } 7386 y2 := o4.Args[1] 7387 if y2.Op != OpARM64MOVDnop { 7388 break 7389 } 7390 x2 := y2.Args[0] 7391 if x2.Op != OpARM64MOVBUload { 7392 break 7393 } 7394 i2 := x2.AuxInt 7395 if x2.Aux != s { 7396 break 7397 } 7398 if p != x2.Args[0] { 7399 break 7400 } 7401 if mem != x2.Args[1] { 7402 break 7403 } 7404 y3 := o3.Args[1] 7405 if y3.Op != OpARM64MOVDnop { 7406 break 7407 } 7408 x3 := y3.Args[0] 7409 if x3.Op != OpARM64MOVBUload { 7410 break 7411 } 7412 i3 := x3.AuxInt 7413 if x3.Aux != s { 7414 break 7415 } 7416 if p != x3.Args[0] { 7417 break 7418 } 7419 if mem != x3.Args[1] { 7420 break 7421 } 7422 y4 := o2.Args[1] 7423 if y4.Op != OpARM64MOVDnop { 7424 break 7425 } 7426 x4 := y4.Args[0] 7427 if x4.Op != 
OpARM64MOVBUload { 7428 break 7429 } 7430 i4 := x4.AuxInt 7431 if x4.Aux != s { 7432 break 7433 } 7434 if p != x4.Args[0] { 7435 break 7436 } 7437 if mem != x4.Args[1] { 7438 break 7439 } 7440 y5 := o1.Args[1] 7441 if y5.Op != OpARM64MOVDnop { 7442 break 7443 } 7444 x5 := y5.Args[0] 7445 if x5.Op != OpARM64MOVBUload { 7446 break 7447 } 7448 i5 := x5.AuxInt 7449 if x5.Aux != s { 7450 break 7451 } 7452 if p != x5.Args[0] { 7453 break 7454 } 7455 if mem != x5.Args[1] { 7456 break 7457 } 7458 y6 := o0.Args[1] 7459 if y6.Op != OpARM64MOVDnop { 7460 break 7461 } 7462 x6 := y6.Args[0] 7463 if x6.Op != OpARM64MOVBUload { 7464 break 7465 } 7466 i6 := x6.AuxInt 7467 if x6.Aux != s { 7468 break 7469 } 7470 if p != x6.Args[0] { 7471 break 7472 } 7473 if mem != x6.Args[1] { 7474 break 7475 } 7476 y7 := v.Args[1] 7477 if y7.Op != OpARM64MOVDnop { 7478 break 7479 } 7480 x7 := y7.Args[0] 7481 if x7.Op != OpARM64MOVBUload { 7482 break 7483 } 7484 i7 := x7.AuxInt 7485 if x7.Aux != s { 7486 break 7487 } 7488 if p != x7.Args[0] { 7489 break 7490 } 7491 if mem != x7.Args[1] { 7492 break 7493 } 7494 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7495 break 7496 } 7497 b = 
mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7498 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7499 v.reset(OpCopy) 7500 v.AddArg(v0) 7501 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7502 v1.Aux = s 7503 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7504 v2.AuxInt = i0 7505 v2.AddArg(p) 7506 v1.AddArg(v2) 7507 v1.AddArg(mem) 7508 v0.AddArg(v1) 7509 return true 7510 } 7511 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 7512 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7513 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7514 for { 7515 t := v.Type 7516 y7 := v.Args[0] 7517 if y7.Op != OpARM64MOVDnop { 7518 break 7519 } 7520 x7 := y7.Args[0] 7521 if x7.Op != 
OpARM64MOVBUload { 7522 break 7523 } 7524 i7 := x7.AuxInt 7525 s := x7.Aux 7526 p := x7.Args[0] 7527 mem := x7.Args[1] 7528 o0 := v.Args[1] 7529 if o0.Op != OpARM64ORshiftLL { 7530 break 7531 } 7532 if o0.AuxInt != 8 { 7533 break 7534 } 7535 o1 := o0.Args[0] 7536 if o1.Op != OpARM64ORshiftLL { 7537 break 7538 } 7539 if o1.AuxInt != 16 { 7540 break 7541 } 7542 o2 := o1.Args[0] 7543 if o2.Op != OpARM64ORshiftLL { 7544 break 7545 } 7546 if o2.AuxInt != 24 { 7547 break 7548 } 7549 o3 := o2.Args[0] 7550 if o3.Op != OpARM64ORshiftLL { 7551 break 7552 } 7553 if o3.AuxInt != 32 { 7554 break 7555 } 7556 o4 := o3.Args[0] 7557 if o4.Op != OpARM64ORshiftLL { 7558 break 7559 } 7560 if o4.AuxInt != 40 { 7561 break 7562 } 7563 o5 := o4.Args[0] 7564 if o5.Op != OpARM64ORshiftLL { 7565 break 7566 } 7567 if o5.AuxInt != 48 { 7568 break 7569 } 7570 s0 := o5.Args[0] 7571 if s0.Op != OpARM64SLLconst { 7572 break 7573 } 7574 if s0.AuxInt != 56 { 7575 break 7576 } 7577 y0 := s0.Args[0] 7578 if y0.Op != OpARM64MOVDnop { 7579 break 7580 } 7581 x0 := y0.Args[0] 7582 if x0.Op != OpARM64MOVBUload { 7583 break 7584 } 7585 i0 := x0.AuxInt 7586 if x0.Aux != s { 7587 break 7588 } 7589 if p != x0.Args[0] { 7590 break 7591 } 7592 if mem != x0.Args[1] { 7593 break 7594 } 7595 y1 := o5.Args[1] 7596 if y1.Op != OpARM64MOVDnop { 7597 break 7598 } 7599 x1 := y1.Args[0] 7600 if x1.Op != OpARM64MOVBUload { 7601 break 7602 } 7603 i1 := x1.AuxInt 7604 if x1.Aux != s { 7605 break 7606 } 7607 if p != x1.Args[0] { 7608 break 7609 } 7610 if mem != x1.Args[1] { 7611 break 7612 } 7613 y2 := o4.Args[1] 7614 if y2.Op != OpARM64MOVDnop { 7615 break 7616 } 7617 x2 := y2.Args[0] 7618 if x2.Op != OpARM64MOVBUload { 7619 break 7620 } 7621 i2 := x2.AuxInt 7622 if x2.Aux != s { 7623 break 7624 } 7625 if p != x2.Args[0] { 7626 break 7627 } 7628 if mem != x2.Args[1] { 7629 break 7630 } 7631 y3 := o3.Args[1] 7632 if y3.Op != OpARM64MOVDnop { 7633 break 7634 } 7635 x3 := y3.Args[0] 7636 if x3.Op != OpARM64MOVBUload { 7637 
break 7638 } 7639 i3 := x3.AuxInt 7640 if x3.Aux != s { 7641 break 7642 } 7643 if p != x3.Args[0] { 7644 break 7645 } 7646 if mem != x3.Args[1] { 7647 break 7648 } 7649 y4 := o2.Args[1] 7650 if y4.Op != OpARM64MOVDnop { 7651 break 7652 } 7653 x4 := y4.Args[0] 7654 if x4.Op != OpARM64MOVBUload { 7655 break 7656 } 7657 i4 := x4.AuxInt 7658 if x4.Aux != s { 7659 break 7660 } 7661 if p != x4.Args[0] { 7662 break 7663 } 7664 if mem != x4.Args[1] { 7665 break 7666 } 7667 y5 := o1.Args[1] 7668 if y5.Op != OpARM64MOVDnop { 7669 break 7670 } 7671 x5 := y5.Args[0] 7672 if x5.Op != OpARM64MOVBUload { 7673 break 7674 } 7675 i5 := x5.AuxInt 7676 if x5.Aux != s { 7677 break 7678 } 7679 if p != x5.Args[0] { 7680 break 7681 } 7682 if mem != x5.Args[1] { 7683 break 7684 } 7685 y6 := o0.Args[1] 7686 if y6.Op != OpARM64MOVDnop { 7687 break 7688 } 7689 x6 := y6.Args[0] 7690 if x6.Op != OpARM64MOVBUload { 7691 break 7692 } 7693 i6 := x6.AuxInt 7694 if x6.Aux != s { 7695 break 7696 } 7697 if p != x6.Args[0] { 7698 break 7699 } 7700 if mem != x6.Args[1] { 7701 break 7702 } 7703 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7704 break 7705 } 7706 b = mergePoint(b, x0, x1, x2, 
x3, x4, x5, x6, x7)
		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
		v1.Aux = s
		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v2.AuxInt = i0
		v2.AddArg(p)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORconst_0 applies the constant-folding rules for
// (ORconst [c] x): OR with 0 is the identity, OR with -1 saturates to -1, and
// an ORconst over a MOVDconst or nested ORconst folds into a single constant.
// It reports whether v was rewritten.
//
// NOTE(review): this file is generated from gen/ARM64.rules (see the file
// header); fix rules there and regenerate rather than editing this code.
func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVDconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond:
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORshiftLL_0 applies the first batch of rewrite
// rules for (ORshiftLL [d] x y): constant folding, rotate recognition
// (OR of matching left/right shifts), and recognition of byte-load + OR +
// shift chains that can be collapsed into a single wider unsigned load
// (optionally byte-swapped via REV16W/REVW for the reversed byte order).
// It reports whether v was rewritten.
//
// NOTE(review): generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ORshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		// shift the constant at compile time instead of emitting the shift
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (ORshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x)
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// Two adjacent byte loads (i0, i0+1) combined in ascending order become a
	// single unsigned halfword load at i0.
	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem)))
	// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
	// result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		if v.AuxInt != 8 {
			break
		}
		y0 := v.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUload {
			break
		}
		i0 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := v.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i1 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
			break
		}
		b = mergePoint(b, x0, x1)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// A halfword load at i0 plus byte loads at i0+2 and i0+3 merge into one
	// unsigned word load at i0.
	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem)))
	// cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)
	// result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		if v.AuxInt != 24 {
			break
		}
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 16 {
			break
		}
		x0 := o0.Args[0]
		if x0.Op != OpARM64MOVHUload {
			break
		}
		i0 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o0.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i2 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := v.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i3 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
			break
		}
		b = mergePoint(b, x0, x1, x2)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// A word load at i0 plus byte loads at i0+4..i0+7 merge into one
	// doubleword load at i0.
	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem)))
	// cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)
	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		if v.AuxInt != 56 {
			break
		}
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 48 {
			break
		}
		o1 := o0.Args[0]
		if o1.Op != OpARM64ORshiftLL {
			break
		}
		if o1.AuxInt != 40 {
			break
		}
		o2 := o1.Args[0]
		if o2.Op != OpARM64ORshiftLL {
			break
		}
		if o2.AuxInt != 32 {
			break
		}
		x0 := o2.Args[0]
		if x0.Op != OpARM64MOVWUload {
			break
		}
		i0 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o2.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i4 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := o1.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i5 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		y3 := o0.Args[1]
		if y3.Op != OpARM64MOVDnop {
			break
		}
		x3 := y3.Args[0]
		if x3.Op != OpARM64MOVBUload {
			break
		}
		i6 := x3.AuxInt
		if x3.Aux != s {
			break
		}
		if p != x3.Args[0] {
			break
		}
		if mem != x3.Args[1] {
			break
		}
		y4 := v.Args[1]
		if y4.Op != OpARM64MOVDnop {
			break
		}
		x4 := y4.Args[0]
		if x4.Op != OpARM64MOVBUload {
			break
		}
		i7 := x4.AuxInt
		if x4.Aux != s {
			break
		}
		if p != x4.Args[0] {
			break
		}
		if mem != x4.Args[1] {
			break
		}
		if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
			break
		}
		b = mergePoint(b, x0, x1, x2, x3, x4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// Two adjacent byte loads combined in descending order (byte at i0+1 in
	// the low position) become a halfword load followed by a 16-bit byte
	// reversal (REV16W).
	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 && fitsARM64Offset(i0, 2, s) && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem))
	for {
		t := v.Type
		if v.AuxInt != 8 {
			break
		}
		y0 := v.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUload {
			break
		}
		i1 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := v.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i0 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		if !(i1 == i0+1 && fitsARM64Offset(i0, 2, s) && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
			break
		}
		b = mergePoint(b, x0, x1)
		v0 := b.NewValue0(v.Pos, OpARM64REV16W, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t)
		v1.AuxInt = i0
		v1.Aux = s
		v1.AddArg(p)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	// A byte-reversed halfword load at i0+2 plus byte loads at i0+1 and i0
	// merge into a word load at i0 followed by a 32-bit byte reversal (REVW).
	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)
	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem))
	for {
		t := v.Type
		if v.AuxInt != 24 {
			break
		}
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 16 {
			break
		}
		y0 := o0.Args[0]
		if y0.Op != OpARM64REV16W {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVHUload {
			break
		}
		i2 := x0.AuxInt
		s := x0.Aux
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o0.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i1 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := v.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i0 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
			break
		}
		b = mergePoint(b, x0, x1, x2)
		v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
		v1.Aux = s
		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v2.AuxInt = i0
		v2.AddArg(p)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORshiftLL_10 continues the ORshiftLL rewrite
// rules started in rewriteValueARM64_OpARM64ORshiftLL_0 (generated
// continuation; only its beginning is visible in this chunk).
func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)
	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
	for {
		t := v.Type
		if v.AuxInt != 56 {
			break
		}
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 48 {
			break
		}
		o1 := o0.Args[0]
		if o1.Op != OpARM64ORshiftLL {
			break
		}
		if o1.AuxInt != 40 {
			break
		}
		o2 := o1.Args[0]
8301 if o2.Op != OpARM64ORshiftLL { 8302 break 8303 } 8304 if o2.AuxInt != 32 { 8305 break 8306 } 8307 y0 := o2.Args[0] 8308 if y0.Op != OpARM64REVW { 8309 break 8310 } 8311 x0 := y0.Args[0] 8312 if x0.Op != OpARM64MOVWUload { 8313 break 8314 } 8315 i4 := x0.AuxInt 8316 s := x0.Aux 8317 p := x0.Args[0] 8318 mem := x0.Args[1] 8319 y1 := o2.Args[1] 8320 if y1.Op != OpARM64MOVDnop { 8321 break 8322 } 8323 x1 := y1.Args[0] 8324 if x1.Op != OpARM64MOVBUload { 8325 break 8326 } 8327 i3 := x1.AuxInt 8328 if x1.Aux != s { 8329 break 8330 } 8331 if p != x1.Args[0] { 8332 break 8333 } 8334 if mem != x1.Args[1] { 8335 break 8336 } 8337 y2 := o1.Args[1] 8338 if y2.Op != OpARM64MOVDnop { 8339 break 8340 } 8341 x2 := y2.Args[0] 8342 if x2.Op != OpARM64MOVBUload { 8343 break 8344 } 8345 i2 := x2.AuxInt 8346 if x2.Aux != s { 8347 break 8348 } 8349 if p != x2.Args[0] { 8350 break 8351 } 8352 if mem != x2.Args[1] { 8353 break 8354 } 8355 y3 := o0.Args[1] 8356 if y3.Op != OpARM64MOVDnop { 8357 break 8358 } 8359 x3 := y3.Args[0] 8360 if x3.Op != OpARM64MOVBUload { 8361 break 8362 } 8363 i1 := x3.AuxInt 8364 if x3.Aux != s { 8365 break 8366 } 8367 if p != x3.Args[0] { 8368 break 8369 } 8370 if mem != x3.Args[1] { 8371 break 8372 } 8373 y4 := v.Args[1] 8374 if y4.Op != OpARM64MOVDnop { 8375 break 8376 } 8377 x4 := y4.Args[0] 8378 if x4.Op != OpARM64MOVBUload { 8379 break 8380 } 8381 i0 := x4.AuxInt 8382 if x4.Aux != s { 8383 break 8384 } 8385 if p != x4.Args[0] { 8386 break 8387 } 8388 if mem != x4.Args[1] { 8389 break 8390 } 8391 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && 
clobber(o0) && clobber(o1) && clobber(o2)) { 8392 break 8393 } 8394 b = mergePoint(b, x0, x1, x2, x3, x4) 8395 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 8396 v.reset(OpCopy) 8397 v.AddArg(v0) 8398 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8399 v1.Aux = s 8400 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8401 v2.AuxInt = i0 8402 v2.AddArg(p) 8403 v1.AddArg(v2) 8404 v1.AddArg(mem) 8405 v0.AddArg(v1) 8406 return true 8407 } 8408 return false 8409 } 8410 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 8411 b := v.Block 8412 _ = b 8413 // match: (ORshiftRA (MOVDconst [c]) x [d]) 8414 // cond: 8415 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 8416 for { 8417 d := v.AuxInt 8418 v_0 := v.Args[0] 8419 if v_0.Op != OpARM64MOVDconst { 8420 break 8421 } 8422 c := v_0.AuxInt 8423 x := v.Args[1] 8424 v.reset(OpARM64ORconst) 8425 v.AuxInt = c 8426 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 8427 v0.AuxInt = d 8428 v0.AddArg(x) 8429 v.AddArg(v0) 8430 return true 8431 } 8432 // match: (ORshiftRA x (MOVDconst [c]) [d]) 8433 // cond: 8434 // result: (ORconst x [int64(int64(c)>>uint64(d))]) 8435 for { 8436 d := v.AuxInt 8437 x := v.Args[0] 8438 v_1 := v.Args[1] 8439 if v_1.Op != OpARM64MOVDconst { 8440 break 8441 } 8442 c := v_1.AuxInt 8443 v.reset(OpARM64ORconst) 8444 v.AuxInt = int64(int64(c) >> uint64(d)) 8445 v.AddArg(x) 8446 return true 8447 } 8448 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 8449 // cond: c==d 8450 // result: y 8451 for { 8452 d := v.AuxInt 8453 x := v.Args[0] 8454 y := v.Args[1] 8455 if y.Op != OpARM64SRAconst { 8456 break 8457 } 8458 c := y.AuxInt 8459 if x != y.Args[0] { 8460 break 8461 } 8462 if !(c == d) { 8463 break 8464 } 8465 v.reset(OpCopy) 8466 v.Type = y.Type 8467 v.AddArg(y) 8468 return true 8469 } 8470 return false 8471 } 8472 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 8473 b := v.Block 8474 _ = b 8475 // match: (ORshiftRL (MOVDconst [c]) x [d]) 8476 // cond: 8477 // result: (ORconst [c] (SRLconst <x.Type> x 
[d])) 8478 for { 8479 d := v.AuxInt 8480 v_0 := v.Args[0] 8481 if v_0.Op != OpARM64MOVDconst { 8482 break 8483 } 8484 c := v_0.AuxInt 8485 x := v.Args[1] 8486 v.reset(OpARM64ORconst) 8487 v.AuxInt = c 8488 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 8489 v0.AuxInt = d 8490 v0.AddArg(x) 8491 v.AddArg(v0) 8492 return true 8493 } 8494 // match: (ORshiftRL x (MOVDconst [c]) [d]) 8495 // cond: 8496 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 8497 for { 8498 d := v.AuxInt 8499 x := v.Args[0] 8500 v_1 := v.Args[1] 8501 if v_1.Op != OpARM64MOVDconst { 8502 break 8503 } 8504 c := v_1.AuxInt 8505 v.reset(OpARM64ORconst) 8506 v.AuxInt = int64(uint64(c) >> uint64(d)) 8507 v.AddArg(x) 8508 return true 8509 } 8510 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 8511 // cond: c==d 8512 // result: y 8513 for { 8514 d := v.AuxInt 8515 x := v.Args[0] 8516 y := v.Args[1] 8517 if y.Op != OpARM64SRLconst { 8518 break 8519 } 8520 c := y.AuxInt 8521 if x != y.Args[0] { 8522 break 8523 } 8524 if !(c == d) { 8525 break 8526 } 8527 v.reset(OpCopy) 8528 v.Type = y.Type 8529 v.AddArg(y) 8530 return true 8531 } 8532 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 8533 // cond: 8534 // result: (RORconst [ c] x) 8535 for { 8536 c := v.AuxInt 8537 v_0 := v.Args[0] 8538 if v_0.Op != OpARM64SLLconst { 8539 break 8540 } 8541 if v_0.AuxInt != 64-c { 8542 break 8543 } 8544 x := v_0.Args[0] 8545 if x != v.Args[1] { 8546 break 8547 } 8548 v.reset(OpARM64RORconst) 8549 v.AuxInt = c 8550 v.AddArg(x) 8551 return true 8552 } 8553 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 8554 // cond: c < 32 && t.Size() == 4 8555 // result: (RORWconst [ c] x) 8556 for { 8557 t := v.Type 8558 c := v.AuxInt 8559 v_0 := v.Args[0] 8560 if v_0.Op != OpARM64SLLconst { 8561 break 8562 } 8563 if v_0.AuxInt != 32-c { 8564 break 8565 } 8566 x := v_0.Args[0] 8567 v_1 := v.Args[1] 8568 if v_1.Op != OpARM64MOVWUreg { 8569 break 8570 } 8571 if x != v_1.Args[0] { 8572 break 8573 } 8574 if !(c < 32 && 
t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SLL_0(v *Value) bool {
	// match: (SLL x (MOVDconst [c]))
	// result: (SLLconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SLLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool {
	// match: (SLLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst || v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^(1<<uint(c) - 1)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SRA_0(v *Value) bool {
	// match: (SRA x (MOVDconst [c]))
	// result: (SRAconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRAconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool {
	// match: (SRAconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SRL_0(v *Value) bool {
	// match: (SRL x (MOVDconst [c]))
	// result: (SRLconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool {
	// match: (SRLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	// match: (SRLconst [c] (SLLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [1<<uint(64-c)-1] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst || v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = 1<<uint(64-c) - 1
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (SUB x (MOVDconst [c]))
	// result: (SUBconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUB x (SUB y z))
	// result: (SUB (ADD <v.Type> x z) y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SUB {
			break
		}
		y := v_1.Args[0]
		z := v_1.Args[1]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
	// match: (SUB (SUB x y) z)
	// result: (SUB x (ADD <y.Type> y z))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUB {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		z := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (SLLconst [c] y))
	// result: (SUBshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x (SRLconst [c] y))
	// result: (SUBshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x (SRAconst [c] y))
	// result: (SUBshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool {
	// match: (SUBconst [0] x)
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBconst [c] (SUBconst [d] x))
	// result: (ADDconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (ADDconst [d] x))
	// result: (ADDconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool {
	// match: (SUBshiftLL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || c != d {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool {
	// match: (SUBshiftRA x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || c != d {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool {
	// match: (SUBshiftRL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || c != d {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool {
// match: (UDIV x (MOVDconst [1])) 9036 // cond: 9037 // result: x 9038 for { 9039 x := v.Args[0] 9040 v_1 := v.Args[1] 9041 if v_1.Op != OpARM64MOVDconst { 9042 break 9043 } 9044 if v_1.AuxInt != 1 { 9045 break 9046 } 9047 v.reset(OpCopy) 9048 v.Type = x.Type 9049 v.AddArg(x) 9050 return true 9051 } 9052 // match: (UDIV x (MOVDconst [c])) 9053 // cond: isPowerOfTwo(c) 9054 // result: (SRLconst [log2(c)] x) 9055 for { 9056 x := v.Args[0] 9057 v_1 := v.Args[1] 9058 if v_1.Op != OpARM64MOVDconst { 9059 break 9060 } 9061 c := v_1.AuxInt 9062 if !(isPowerOfTwo(c)) { 9063 break 9064 } 9065 v.reset(OpARM64SRLconst) 9066 v.AuxInt = log2(c) 9067 v.AddArg(x) 9068 return true 9069 } 9070 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 9071 // cond: 9072 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 9073 for { 9074 v_0 := v.Args[0] 9075 if v_0.Op != OpARM64MOVDconst { 9076 break 9077 } 9078 c := v_0.AuxInt 9079 v_1 := v.Args[1] 9080 if v_1.Op != OpARM64MOVDconst { 9081 break 9082 } 9083 d := v_1.AuxInt 9084 v.reset(OpARM64MOVDconst) 9085 v.AuxInt = int64(uint64(c) / uint64(d)) 9086 return true 9087 } 9088 return false 9089 } 9090 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 9091 // match: (UDIVW x (MOVDconst [c])) 9092 // cond: uint32(c)==1 9093 // result: x 9094 for { 9095 x := v.Args[0] 9096 v_1 := v.Args[1] 9097 if v_1.Op != OpARM64MOVDconst { 9098 break 9099 } 9100 c := v_1.AuxInt 9101 if !(uint32(c) == 1) { 9102 break 9103 } 9104 v.reset(OpCopy) 9105 v.Type = x.Type 9106 v.AddArg(x) 9107 return true 9108 } 9109 // match: (UDIVW x (MOVDconst [c])) 9110 // cond: isPowerOfTwo(c) && is32Bit(c) 9111 // result: (SRLconst [log2(c)] x) 9112 for { 9113 x := v.Args[0] 9114 v_1 := v.Args[1] 9115 if v_1.Op != OpARM64MOVDconst { 9116 break 9117 } 9118 c := v_1.AuxInt 9119 if !(isPowerOfTwo(c) && is32Bit(c)) { 9120 break 9121 } 9122 v.reset(OpARM64SRLconst) 9123 v.AuxInt = log2(c) 9124 v.AddArg(x) 9125 return true 9126 } 9127 // match: (UDIVW (MOVDconst [c]) 
(MOVDconst [d])) 9128 // cond: 9129 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 9130 for { 9131 v_0 := v.Args[0] 9132 if v_0.Op != OpARM64MOVDconst { 9133 break 9134 } 9135 c := v_0.AuxInt 9136 v_1 := v.Args[1] 9137 if v_1.Op != OpARM64MOVDconst { 9138 break 9139 } 9140 d := v_1.AuxInt 9141 v.reset(OpARM64MOVDconst) 9142 v.AuxInt = int64(uint32(c) / uint32(d)) 9143 return true 9144 } 9145 return false 9146 } 9147 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 9148 // match: (UMOD _ (MOVDconst [1])) 9149 // cond: 9150 // result: (MOVDconst [0]) 9151 for { 9152 v_1 := v.Args[1] 9153 if v_1.Op != OpARM64MOVDconst { 9154 break 9155 } 9156 if v_1.AuxInt != 1 { 9157 break 9158 } 9159 v.reset(OpARM64MOVDconst) 9160 v.AuxInt = 0 9161 return true 9162 } 9163 // match: (UMOD x (MOVDconst [c])) 9164 // cond: isPowerOfTwo(c) 9165 // result: (ANDconst [c-1] x) 9166 for { 9167 x := v.Args[0] 9168 v_1 := v.Args[1] 9169 if v_1.Op != OpARM64MOVDconst { 9170 break 9171 } 9172 c := v_1.AuxInt 9173 if !(isPowerOfTwo(c)) { 9174 break 9175 } 9176 v.reset(OpARM64ANDconst) 9177 v.AuxInt = c - 1 9178 v.AddArg(x) 9179 return true 9180 } 9181 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 9182 // cond: 9183 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 9184 for { 9185 v_0 := v.Args[0] 9186 if v_0.Op != OpARM64MOVDconst { 9187 break 9188 } 9189 c := v_0.AuxInt 9190 v_1 := v.Args[1] 9191 if v_1.Op != OpARM64MOVDconst { 9192 break 9193 } 9194 d := v_1.AuxInt 9195 v.reset(OpARM64MOVDconst) 9196 v.AuxInt = int64(uint64(c) % uint64(d)) 9197 return true 9198 } 9199 return false 9200 } 9201 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 9202 // match: (UMODW _ (MOVDconst [c])) 9203 // cond: uint32(c)==1 9204 // result: (MOVDconst [0]) 9205 for { 9206 v_1 := v.Args[1] 9207 if v_1.Op != OpARM64MOVDconst { 9208 break 9209 } 9210 c := v_1.AuxInt 9211 if !(uint32(c) == 1) { 9212 break 9213 } 9214 v.reset(OpARM64MOVDconst) 9215 v.AuxInt = 0 9216 return true 9217 } 9218 // 
match: (UMODW x (MOVDconst [c])) 9219 // cond: isPowerOfTwo(c) && is32Bit(c) 9220 // result: (ANDconst [c-1] x) 9221 for { 9222 x := v.Args[0] 9223 v_1 := v.Args[1] 9224 if v_1.Op != OpARM64MOVDconst { 9225 break 9226 } 9227 c := v_1.AuxInt 9228 if !(isPowerOfTwo(c) && is32Bit(c)) { 9229 break 9230 } 9231 v.reset(OpARM64ANDconst) 9232 v.AuxInt = c - 1 9233 v.AddArg(x) 9234 return true 9235 } 9236 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 9237 // cond: 9238 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 9239 for { 9240 v_0 := v.Args[0] 9241 if v_0.Op != OpARM64MOVDconst { 9242 break 9243 } 9244 c := v_0.AuxInt 9245 v_1 := v.Args[1] 9246 if v_1.Op != OpARM64MOVDconst { 9247 break 9248 } 9249 d := v_1.AuxInt 9250 v.reset(OpARM64MOVDconst) 9251 v.AuxInt = int64(uint32(c) % uint32(d)) 9252 return true 9253 } 9254 return false 9255 } 9256 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 9257 // match: (XOR x (MOVDconst [c])) 9258 // cond: 9259 // result: (XORconst [c] x) 9260 for { 9261 x := v.Args[0] 9262 v_1 := v.Args[1] 9263 if v_1.Op != OpARM64MOVDconst { 9264 break 9265 } 9266 c := v_1.AuxInt 9267 v.reset(OpARM64XORconst) 9268 v.AuxInt = c 9269 v.AddArg(x) 9270 return true 9271 } 9272 // match: (XOR (MOVDconst [c]) x) 9273 // cond: 9274 // result: (XORconst [c] x) 9275 for { 9276 v_0 := v.Args[0] 9277 if v_0.Op != OpARM64MOVDconst { 9278 break 9279 } 9280 c := v_0.AuxInt 9281 x := v.Args[1] 9282 v.reset(OpARM64XORconst) 9283 v.AuxInt = c 9284 v.AddArg(x) 9285 return true 9286 } 9287 // match: (XOR x x) 9288 // cond: 9289 // result: (MOVDconst [0]) 9290 for { 9291 x := v.Args[0] 9292 if x != v.Args[1] { 9293 break 9294 } 9295 v.reset(OpARM64MOVDconst) 9296 v.AuxInt = 0 9297 return true 9298 } 9299 // match: (XOR x (SLLconst [c] y)) 9300 // cond: 9301 // result: (XORshiftLL x y [c]) 9302 for { 9303 x := v.Args[0] 9304 v_1 := v.Args[1] 9305 if v_1.Op != OpARM64SLLconst { 9306 break 9307 } 9308 c := v_1.AuxInt 9309 y := v_1.Args[0] 9310 
v.reset(OpARM64XORshiftLL) 9311 v.AuxInt = c 9312 v.AddArg(x) 9313 v.AddArg(y) 9314 return true 9315 } 9316 // match: (XOR (SLLconst [c] y) x) 9317 // cond: 9318 // result: (XORshiftLL x y [c]) 9319 for { 9320 v_0 := v.Args[0] 9321 if v_0.Op != OpARM64SLLconst { 9322 break 9323 } 9324 c := v_0.AuxInt 9325 y := v_0.Args[0] 9326 x := v.Args[1] 9327 v.reset(OpARM64XORshiftLL) 9328 v.AuxInt = c 9329 v.AddArg(x) 9330 v.AddArg(y) 9331 return true 9332 } 9333 // match: (XOR x (SRLconst [c] y)) 9334 // cond: 9335 // result: (XORshiftRL x y [c]) 9336 for { 9337 x := v.Args[0] 9338 v_1 := v.Args[1] 9339 if v_1.Op != OpARM64SRLconst { 9340 break 9341 } 9342 c := v_1.AuxInt 9343 y := v_1.Args[0] 9344 v.reset(OpARM64XORshiftRL) 9345 v.AuxInt = c 9346 v.AddArg(x) 9347 v.AddArg(y) 9348 return true 9349 } 9350 // match: (XOR (SRLconst [c] y) x) 9351 // cond: 9352 // result: (XORshiftRL x y [c]) 9353 for { 9354 v_0 := v.Args[0] 9355 if v_0.Op != OpARM64SRLconst { 9356 break 9357 } 9358 c := v_0.AuxInt 9359 y := v_0.Args[0] 9360 x := v.Args[1] 9361 v.reset(OpARM64XORshiftRL) 9362 v.AuxInt = c 9363 v.AddArg(x) 9364 v.AddArg(y) 9365 return true 9366 } 9367 // match: (XOR x (SRAconst [c] y)) 9368 // cond: 9369 // result: (XORshiftRA x y [c]) 9370 for { 9371 x := v.Args[0] 9372 v_1 := v.Args[1] 9373 if v_1.Op != OpARM64SRAconst { 9374 break 9375 } 9376 c := v_1.AuxInt 9377 y := v_1.Args[0] 9378 v.reset(OpARM64XORshiftRA) 9379 v.AuxInt = c 9380 v.AddArg(x) 9381 v.AddArg(y) 9382 return true 9383 } 9384 // match: (XOR (SRAconst [c] y) x) 9385 // cond: 9386 // result: (XORshiftRA x y [c]) 9387 for { 9388 v_0 := v.Args[0] 9389 if v_0.Op != OpARM64SRAconst { 9390 break 9391 } 9392 c := v_0.AuxInt 9393 y := v_0.Args[0] 9394 x := v.Args[1] 9395 v.reset(OpARM64XORshiftRA) 9396 v.AuxInt = c 9397 v.AddArg(x) 9398 v.AddArg(y) 9399 return true 9400 } 9401 return false 9402 } 9403 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 9404 // match: (XORconst [0] x) 9405 // cond: 9406 // result: x 
9407 for { 9408 if v.AuxInt != 0 { 9409 break 9410 } 9411 x := v.Args[0] 9412 v.reset(OpCopy) 9413 v.Type = x.Type 9414 v.AddArg(x) 9415 return true 9416 } 9417 // match: (XORconst [-1] x) 9418 // cond: 9419 // result: (MVN x) 9420 for { 9421 if v.AuxInt != -1 { 9422 break 9423 } 9424 x := v.Args[0] 9425 v.reset(OpARM64MVN) 9426 v.AddArg(x) 9427 return true 9428 } 9429 // match: (XORconst [c] (MOVDconst [d])) 9430 // cond: 9431 // result: (MOVDconst [c^d]) 9432 for { 9433 c := v.AuxInt 9434 v_0 := v.Args[0] 9435 if v_0.Op != OpARM64MOVDconst { 9436 break 9437 } 9438 d := v_0.AuxInt 9439 v.reset(OpARM64MOVDconst) 9440 v.AuxInt = c ^ d 9441 return true 9442 } 9443 // match: (XORconst [c] (XORconst [d] x)) 9444 // cond: 9445 // result: (XORconst [c^d] x) 9446 for { 9447 c := v.AuxInt 9448 v_0 := v.Args[0] 9449 if v_0.Op != OpARM64XORconst { 9450 break 9451 } 9452 d := v_0.AuxInt 9453 x := v_0.Args[0] 9454 v.reset(OpARM64XORconst) 9455 v.AuxInt = c ^ d 9456 v.AddArg(x) 9457 return true 9458 } 9459 return false 9460 } 9461 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool { 9462 b := v.Block 9463 _ = b 9464 // match: (XORshiftLL (MOVDconst [c]) x [d]) 9465 // cond: 9466 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 9467 for { 9468 d := v.AuxInt 9469 v_0 := v.Args[0] 9470 if v_0.Op != OpARM64MOVDconst { 9471 break 9472 } 9473 c := v_0.AuxInt 9474 x := v.Args[1] 9475 v.reset(OpARM64XORconst) 9476 v.AuxInt = c 9477 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 9478 v0.AuxInt = d 9479 v0.AddArg(x) 9480 v.AddArg(v0) 9481 return true 9482 } 9483 // match: (XORshiftLL x (MOVDconst [c]) [d]) 9484 // cond: 9485 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 9486 for { 9487 d := v.AuxInt 9488 x := v.Args[0] 9489 v_1 := v.Args[1] 9490 if v_1.Op != OpARM64MOVDconst { 9491 break 9492 } 9493 c := v_1.AuxInt 9494 v.reset(OpARM64XORconst) 9495 v.AuxInt = int64(uint64(c) << uint64(d)) 9496 v.AddArg(x) 9497 return true 9498 } 9499 // match: (XORshiftLL x 
(SLLconst x [c]) [d]) 9500 // cond: c==d 9501 // result: (MOVDconst [0]) 9502 for { 9503 d := v.AuxInt 9504 x := v.Args[0] 9505 v_1 := v.Args[1] 9506 if v_1.Op != OpARM64SLLconst { 9507 break 9508 } 9509 c := v_1.AuxInt 9510 if x != v_1.Args[0] { 9511 break 9512 } 9513 if !(c == d) { 9514 break 9515 } 9516 v.reset(OpARM64MOVDconst) 9517 v.AuxInt = 0 9518 return true 9519 } 9520 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 9521 // cond: 9522 // result: (RORconst [64-c] x) 9523 for { 9524 c := v.AuxInt 9525 v_0 := v.Args[0] 9526 if v_0.Op != OpARM64SRLconst { 9527 break 9528 } 9529 if v_0.AuxInt != 64-c { 9530 break 9531 } 9532 x := v_0.Args[0] 9533 if x != v.Args[1] { 9534 break 9535 } 9536 v.reset(OpARM64RORconst) 9537 v.AuxInt = 64 - c 9538 v.AddArg(x) 9539 return true 9540 } 9541 // match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 9542 // cond: c < 32 && t.Size() == 4 9543 // result: (RORWconst [32-c] x) 9544 for { 9545 t := v.Type 9546 c := v.AuxInt 9547 v_0 := v.Args[0] 9548 if v_0.Op != OpARM64SRLconst { 9549 break 9550 } 9551 if v_0.AuxInt != 32-c { 9552 break 9553 } 9554 v_0_0 := v_0.Args[0] 9555 if v_0_0.Op != OpARM64MOVWUreg { 9556 break 9557 } 9558 x := v_0_0.Args[0] 9559 if x != v.Args[1] { 9560 break 9561 } 9562 if !(c < 32 && t.Size() == 4) { 9563 break 9564 } 9565 v.reset(OpARM64RORWconst) 9566 v.AuxInt = 32 - c 9567 v.AddArg(x) 9568 return true 9569 } 9570 return false 9571 } 9572 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 9573 b := v.Block 9574 _ = b 9575 // match: (XORshiftRA (MOVDconst [c]) x [d]) 9576 // cond: 9577 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 9578 for { 9579 d := v.AuxInt 9580 v_0 := v.Args[0] 9581 if v_0.Op != OpARM64MOVDconst { 9582 break 9583 } 9584 c := v_0.AuxInt 9585 x := v.Args[1] 9586 v.reset(OpARM64XORconst) 9587 v.AuxInt = c 9588 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 9589 v0.AuxInt = d 9590 v0.AddArg(x) 9591 v.AddArg(v0) 9592 return true 9593 } 9594 // match: 
(XORshiftRA x (MOVDconst [c]) [d]) 9595 // cond: 9596 // result: (XORconst x [int64(int64(c)>>uint64(d))]) 9597 for { 9598 d := v.AuxInt 9599 x := v.Args[0] 9600 v_1 := v.Args[1] 9601 if v_1.Op != OpARM64MOVDconst { 9602 break 9603 } 9604 c := v_1.AuxInt 9605 v.reset(OpARM64XORconst) 9606 v.AuxInt = int64(int64(c) >> uint64(d)) 9607 v.AddArg(x) 9608 return true 9609 } 9610 // match: (XORshiftRA x (SRAconst x [c]) [d]) 9611 // cond: c==d 9612 // result: (MOVDconst [0]) 9613 for { 9614 d := v.AuxInt 9615 x := v.Args[0] 9616 v_1 := v.Args[1] 9617 if v_1.Op != OpARM64SRAconst { 9618 break 9619 } 9620 c := v_1.AuxInt 9621 if x != v_1.Args[0] { 9622 break 9623 } 9624 if !(c == d) { 9625 break 9626 } 9627 v.reset(OpARM64MOVDconst) 9628 v.AuxInt = 0 9629 return true 9630 } 9631 return false 9632 } 9633 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 9634 b := v.Block 9635 _ = b 9636 // match: (XORshiftRL (MOVDconst [c]) x [d]) 9637 // cond: 9638 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 9639 for { 9640 d := v.AuxInt 9641 v_0 := v.Args[0] 9642 if v_0.Op != OpARM64MOVDconst { 9643 break 9644 } 9645 c := v_0.AuxInt 9646 x := v.Args[1] 9647 v.reset(OpARM64XORconst) 9648 v.AuxInt = c 9649 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 9650 v0.AuxInt = d 9651 v0.AddArg(x) 9652 v.AddArg(v0) 9653 return true 9654 } 9655 // match: (XORshiftRL x (MOVDconst [c]) [d]) 9656 // cond: 9657 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 9658 for { 9659 d := v.AuxInt 9660 x := v.Args[0] 9661 v_1 := v.Args[1] 9662 if v_1.Op != OpARM64MOVDconst { 9663 break 9664 } 9665 c := v_1.AuxInt 9666 v.reset(OpARM64XORconst) 9667 v.AuxInt = int64(uint64(c) >> uint64(d)) 9668 v.AddArg(x) 9669 return true 9670 } 9671 // match: (XORshiftRL x (SRLconst x [c]) [d]) 9672 // cond: c==d 9673 // result: (MOVDconst [0]) 9674 for { 9675 d := v.AuxInt 9676 x := v.Args[0] 9677 v_1 := v.Args[1] 9678 if v_1.Op != OpARM64SRLconst { 9679 break 9680 } 9681 c := v_1.AuxInt 9682 if x != 
v_1.Args[0] { 9683 break 9684 } 9685 if !(c == d) { 9686 break 9687 } 9688 v.reset(OpARM64MOVDconst) 9689 v.AuxInt = 0 9690 return true 9691 } 9692 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 9693 // cond: 9694 // result: (RORconst [ c] x) 9695 for { 9696 c := v.AuxInt 9697 v_0 := v.Args[0] 9698 if v_0.Op != OpARM64SLLconst { 9699 break 9700 } 9701 if v_0.AuxInt != 64-c { 9702 break 9703 } 9704 x := v_0.Args[0] 9705 if x != v.Args[1] { 9706 break 9707 } 9708 v.reset(OpARM64RORconst) 9709 v.AuxInt = c 9710 v.AddArg(x) 9711 return true 9712 } 9713 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 9714 // cond: c < 32 && t.Size() == 4 9715 // result: (RORWconst [ c] x) 9716 for { 9717 t := v.Type 9718 c := v.AuxInt 9719 v_0 := v.Args[0] 9720 if v_0.Op != OpARM64SLLconst { 9721 break 9722 } 9723 if v_0.AuxInt != 32-c { 9724 break 9725 } 9726 x := v_0.Args[0] 9727 v_1 := v.Args[1] 9728 if v_1.Op != OpARM64MOVWUreg { 9729 break 9730 } 9731 if x != v_1.Args[0] { 9732 break 9733 } 9734 if !(c < 32 && t.Size() == 4) { 9735 break 9736 } 9737 v.reset(OpARM64RORWconst) 9738 v.AuxInt = c 9739 v.AddArg(x) 9740 return true 9741 } 9742 return false 9743 } 9744 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 9745 // match: (Add16 x y) 9746 // cond: 9747 // result: (ADD x y) 9748 for { 9749 x := v.Args[0] 9750 y := v.Args[1] 9751 v.reset(OpARM64ADD) 9752 v.AddArg(x) 9753 v.AddArg(y) 9754 return true 9755 } 9756 } 9757 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 9758 // match: (Add32 x y) 9759 // cond: 9760 // result: (ADD x y) 9761 for { 9762 x := v.Args[0] 9763 y := v.Args[1] 9764 v.reset(OpARM64ADD) 9765 v.AddArg(x) 9766 v.AddArg(y) 9767 return true 9768 } 9769 } 9770 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 9771 // match: (Add32F x y) 9772 // cond: 9773 // result: (FADDS x y) 9774 for { 9775 x := v.Args[0] 9776 y := v.Args[1] 9777 v.reset(OpARM64FADDS) 9778 v.AddArg(x) 9779 v.AddArg(y) 9780 return true 9781 } 9782 } 9783 func 
rewriteValueARM64_OpAdd64_0(v *Value) bool { 9784 // match: (Add64 x y) 9785 // cond: 9786 // result: (ADD x y) 9787 for { 9788 x := v.Args[0] 9789 y := v.Args[1] 9790 v.reset(OpARM64ADD) 9791 v.AddArg(x) 9792 v.AddArg(y) 9793 return true 9794 } 9795 } 9796 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 9797 // match: (Add64F x y) 9798 // cond: 9799 // result: (FADDD x y) 9800 for { 9801 x := v.Args[0] 9802 y := v.Args[1] 9803 v.reset(OpARM64FADDD) 9804 v.AddArg(x) 9805 v.AddArg(y) 9806 return true 9807 } 9808 } 9809 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 9810 // match: (Add8 x y) 9811 // cond: 9812 // result: (ADD x y) 9813 for { 9814 x := v.Args[0] 9815 y := v.Args[1] 9816 v.reset(OpARM64ADD) 9817 v.AddArg(x) 9818 v.AddArg(y) 9819 return true 9820 } 9821 } 9822 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 9823 // match: (AddPtr x y) 9824 // cond: 9825 // result: (ADD x y) 9826 for { 9827 x := v.Args[0] 9828 y := v.Args[1] 9829 v.reset(OpARM64ADD) 9830 v.AddArg(x) 9831 v.AddArg(y) 9832 return true 9833 } 9834 } 9835 func rewriteValueARM64_OpAddr_0(v *Value) bool { 9836 // match: (Addr {sym} base) 9837 // cond: 9838 // result: (MOVDaddr {sym} base) 9839 for { 9840 sym := v.Aux 9841 base := v.Args[0] 9842 v.reset(OpARM64MOVDaddr) 9843 v.Aux = sym 9844 v.AddArg(base) 9845 return true 9846 } 9847 } 9848 func rewriteValueARM64_OpAnd16_0(v *Value) bool { 9849 // match: (And16 x y) 9850 // cond: 9851 // result: (AND x y) 9852 for { 9853 x := v.Args[0] 9854 y := v.Args[1] 9855 v.reset(OpARM64AND) 9856 v.AddArg(x) 9857 v.AddArg(y) 9858 return true 9859 } 9860 } 9861 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 9862 // match: (And32 x y) 9863 // cond: 9864 // result: (AND x y) 9865 for { 9866 x := v.Args[0] 9867 y := v.Args[1] 9868 v.reset(OpARM64AND) 9869 v.AddArg(x) 9870 v.AddArg(y) 9871 return true 9872 } 9873 } 9874 func rewriteValueARM64_OpAnd64_0(v *Value) bool { 9875 // match: (And64 x y) 9876 // cond: 9877 // result: (AND x y) 9878 for { 
9879 x := v.Args[0] 9880 y := v.Args[1] 9881 v.reset(OpARM64AND) 9882 v.AddArg(x) 9883 v.AddArg(y) 9884 return true 9885 } 9886 } 9887 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 9888 // match: (And8 x y) 9889 // cond: 9890 // result: (AND x y) 9891 for { 9892 x := v.Args[0] 9893 y := v.Args[1] 9894 v.reset(OpARM64AND) 9895 v.AddArg(x) 9896 v.AddArg(y) 9897 return true 9898 } 9899 } 9900 func rewriteValueARM64_OpAndB_0(v *Value) bool { 9901 // match: (AndB x y) 9902 // cond: 9903 // result: (AND x y) 9904 for { 9905 x := v.Args[0] 9906 y := v.Args[1] 9907 v.reset(OpARM64AND) 9908 v.AddArg(x) 9909 v.AddArg(y) 9910 return true 9911 } 9912 } 9913 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 9914 // match: (AtomicAdd32 ptr val mem) 9915 // cond: 9916 // result: (LoweredAtomicAdd32 ptr val mem) 9917 for { 9918 ptr := v.Args[0] 9919 val := v.Args[1] 9920 mem := v.Args[2] 9921 v.reset(OpARM64LoweredAtomicAdd32) 9922 v.AddArg(ptr) 9923 v.AddArg(val) 9924 v.AddArg(mem) 9925 return true 9926 } 9927 } 9928 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 9929 // match: (AtomicAdd64 ptr val mem) 9930 // cond: 9931 // result: (LoweredAtomicAdd64 ptr val mem) 9932 for { 9933 ptr := v.Args[0] 9934 val := v.Args[1] 9935 mem := v.Args[2] 9936 v.reset(OpARM64LoweredAtomicAdd64) 9937 v.AddArg(ptr) 9938 v.AddArg(val) 9939 v.AddArg(mem) 9940 return true 9941 } 9942 } 9943 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 9944 // match: (AtomicAnd8 ptr val mem) 9945 // cond: 9946 // result: (LoweredAtomicAnd8 ptr val mem) 9947 for { 9948 ptr := v.Args[0] 9949 val := v.Args[1] 9950 mem := v.Args[2] 9951 v.reset(OpARM64LoweredAtomicAnd8) 9952 v.AddArg(ptr) 9953 v.AddArg(val) 9954 v.AddArg(mem) 9955 return true 9956 } 9957 } 9958 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 9959 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 9960 // cond: 9961 // result: (LoweredAtomicCas32 ptr old new_ mem) 9962 for { 9963 ptr := v.Args[0] 9964 old := 
v.Args[1] 9965 new_ := v.Args[2] 9966 mem := v.Args[3] 9967 v.reset(OpARM64LoweredAtomicCas32) 9968 v.AddArg(ptr) 9969 v.AddArg(old) 9970 v.AddArg(new_) 9971 v.AddArg(mem) 9972 return true 9973 } 9974 } 9975 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 9976 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 9977 // cond: 9978 // result: (LoweredAtomicCas64 ptr old new_ mem) 9979 for { 9980 ptr := v.Args[0] 9981 old := v.Args[1] 9982 new_ := v.Args[2] 9983 mem := v.Args[3] 9984 v.reset(OpARM64LoweredAtomicCas64) 9985 v.AddArg(ptr) 9986 v.AddArg(old) 9987 v.AddArg(new_) 9988 v.AddArg(mem) 9989 return true 9990 } 9991 } 9992 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool { 9993 // match: (AtomicExchange32 ptr val mem) 9994 // cond: 9995 // result: (LoweredAtomicExchange32 ptr val mem) 9996 for { 9997 ptr := v.Args[0] 9998 val := v.Args[1] 9999 mem := v.Args[2] 10000 v.reset(OpARM64LoweredAtomicExchange32) 10001 v.AddArg(ptr) 10002 v.AddArg(val) 10003 v.AddArg(mem) 10004 return true 10005 } 10006 } 10007 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool { 10008 // match: (AtomicExchange64 ptr val mem) 10009 // cond: 10010 // result: (LoweredAtomicExchange64 ptr val mem) 10011 for { 10012 ptr := v.Args[0] 10013 val := v.Args[1] 10014 mem := v.Args[2] 10015 v.reset(OpARM64LoweredAtomicExchange64) 10016 v.AddArg(ptr) 10017 v.AddArg(val) 10018 v.AddArg(mem) 10019 return true 10020 } 10021 } 10022 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool { 10023 // match: (AtomicLoad32 ptr mem) 10024 // cond: 10025 // result: (LDARW ptr mem) 10026 for { 10027 ptr := v.Args[0] 10028 mem := v.Args[1] 10029 v.reset(OpARM64LDARW) 10030 v.AddArg(ptr) 10031 v.AddArg(mem) 10032 return true 10033 } 10034 } 10035 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool { 10036 // match: (AtomicLoad64 ptr mem) 10037 // cond: 10038 // result: (LDAR ptr mem) 10039 for { 10040 ptr := v.Args[0] 10041 mem := v.Args[1] 10042 v.reset(OpARM64LDAR) 10043 
v.AddArg(ptr) 10044 v.AddArg(mem) 10045 return true 10046 } 10047 } 10048 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool { 10049 // match: (AtomicLoadPtr ptr mem) 10050 // cond: 10051 // result: (LDAR ptr mem) 10052 for { 10053 ptr := v.Args[0] 10054 mem := v.Args[1] 10055 v.reset(OpARM64LDAR) 10056 v.AddArg(ptr) 10057 v.AddArg(mem) 10058 return true 10059 } 10060 } 10061 func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool { 10062 // match: (AtomicOr8 ptr val mem) 10063 // cond: 10064 // result: (LoweredAtomicOr8 ptr val mem) 10065 for { 10066 ptr := v.Args[0] 10067 val := v.Args[1] 10068 mem := v.Args[2] 10069 v.reset(OpARM64LoweredAtomicOr8) 10070 v.AddArg(ptr) 10071 v.AddArg(val) 10072 v.AddArg(mem) 10073 return true 10074 } 10075 } 10076 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool { 10077 // match: (AtomicStore32 ptr val mem) 10078 // cond: 10079 // result: (STLRW ptr val mem) 10080 for { 10081 ptr := v.Args[0] 10082 val := v.Args[1] 10083 mem := v.Args[2] 10084 v.reset(OpARM64STLRW) 10085 v.AddArg(ptr) 10086 v.AddArg(val) 10087 v.AddArg(mem) 10088 return true 10089 } 10090 } 10091 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool { 10092 // match: (AtomicStore64 ptr val mem) 10093 // cond: 10094 // result: (STLR ptr val mem) 10095 for { 10096 ptr := v.Args[0] 10097 val := v.Args[1] 10098 mem := v.Args[2] 10099 v.reset(OpARM64STLR) 10100 v.AddArg(ptr) 10101 v.AddArg(val) 10102 v.AddArg(mem) 10103 return true 10104 } 10105 } 10106 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool { 10107 // match: (AtomicStorePtrNoWB ptr val mem) 10108 // cond: 10109 // result: (STLR ptr val mem) 10110 for { 10111 ptr := v.Args[0] 10112 val := v.Args[1] 10113 mem := v.Args[2] 10114 v.reset(OpARM64STLR) 10115 v.AddArg(ptr) 10116 v.AddArg(val) 10117 v.AddArg(mem) 10118 return true 10119 } 10120 } 10121 func rewriteValueARM64_OpAvg64u_0(v *Value) bool { 10122 b := v.Block 10123 _ = b 10124 // match: (Avg64u <t> x y) 10125 // cond: 10126 // 
result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y) 10127 for { 10128 t := v.Type 10129 x := v.Args[0] 10130 y := v.Args[1] 10131 v.reset(OpARM64ADD) 10132 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 10133 v0.AuxInt = 1 10134 v1 := b.NewValue0(v.Pos, OpARM64SUB, t) 10135 v1.AddArg(x) 10136 v1.AddArg(y) 10137 v0.AddArg(v1) 10138 v.AddArg(v0) 10139 v.AddArg(y) 10140 return true 10141 } 10142 } 10143 func rewriteValueARM64_OpBitLen64_0(v *Value) bool { 10144 b := v.Block 10145 _ = b 10146 types := &b.Func.Config.Types 10147 _ = types 10148 // match: (BitLen64 x) 10149 // cond: 10150 // result: (SUB (MOVDconst [64]) (CLZ <types.Int> x)) 10151 for { 10152 x := v.Args[0] 10153 v.reset(OpARM64SUB) 10154 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64) 10155 v0.AuxInt = 64 10156 v.AddArg(v0) 10157 v1 := b.NewValue0(v.Pos, OpARM64CLZ, types.Int) 10158 v1.AddArg(x) 10159 v.AddArg(v1) 10160 return true 10161 } 10162 } 10163 func rewriteValueARM64_OpBitRev16_0(v *Value) bool { 10164 b := v.Block 10165 _ = b 10166 types := &b.Func.Config.Types 10167 _ = types 10168 // match: (BitRev16 x) 10169 // cond: 10170 // result: (SRLconst [48] (RBIT <types.UInt64> x)) 10171 for { 10172 x := v.Args[0] 10173 v.reset(OpARM64SRLconst) 10174 v.AuxInt = 48 10175 v0 := b.NewValue0(v.Pos, OpARM64RBIT, types.UInt64) 10176 v0.AddArg(x) 10177 v.AddArg(v0) 10178 return true 10179 } 10180 } 10181 func rewriteValueARM64_OpBitRev32_0(v *Value) bool { 10182 // match: (BitRev32 x) 10183 // cond: 10184 // result: (RBITW x) 10185 for { 10186 x := v.Args[0] 10187 v.reset(OpARM64RBITW) 10188 v.AddArg(x) 10189 return true 10190 } 10191 } 10192 func rewriteValueARM64_OpBitRev64_0(v *Value) bool { 10193 // match: (BitRev64 x) 10194 // cond: 10195 // result: (RBIT x) 10196 for { 10197 x := v.Args[0] 10198 v.reset(OpARM64RBIT) 10199 v.AddArg(x) 10200 return true 10201 } 10202 } 10203 func rewriteValueARM64_OpBitRev8_0(v *Value) bool { 10204 b := v.Block 10205 _ = b 10206 types := &b.Func.Config.Types 10207 
_ = types 10208 // match: (BitRev8 x) 10209 // cond: 10210 // result: (SRLconst [56] (RBIT <types.UInt64> x)) 10211 for { 10212 x := v.Args[0] 10213 v.reset(OpARM64SRLconst) 10214 v.AuxInt = 56 10215 v0 := b.NewValue0(v.Pos, OpARM64RBIT, types.UInt64) 10216 v0.AddArg(x) 10217 v.AddArg(v0) 10218 return true 10219 } 10220 } 10221 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 10222 // match: (Bswap32 x) 10223 // cond: 10224 // result: (REVW x) 10225 for { 10226 x := v.Args[0] 10227 v.reset(OpARM64REVW) 10228 v.AddArg(x) 10229 return true 10230 } 10231 } 10232 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 10233 // match: (Bswap64 x) 10234 // cond: 10235 // result: (REV x) 10236 for { 10237 x := v.Args[0] 10238 v.reset(OpARM64REV) 10239 v.AddArg(x) 10240 return true 10241 } 10242 } 10243 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 10244 // match: (ClosureCall [argwid] entry closure mem) 10245 // cond: 10246 // result: (CALLclosure [argwid] entry closure mem) 10247 for { 10248 argwid := v.AuxInt 10249 entry := v.Args[0] 10250 closure := v.Args[1] 10251 mem := v.Args[2] 10252 v.reset(OpARM64CALLclosure) 10253 v.AuxInt = argwid 10254 v.AddArg(entry) 10255 v.AddArg(closure) 10256 v.AddArg(mem) 10257 return true 10258 } 10259 } 10260 func rewriteValueARM64_OpCom16_0(v *Value) bool { 10261 // match: (Com16 x) 10262 // cond: 10263 // result: (MVN x) 10264 for { 10265 x := v.Args[0] 10266 v.reset(OpARM64MVN) 10267 v.AddArg(x) 10268 return true 10269 } 10270 } 10271 func rewriteValueARM64_OpCom32_0(v *Value) bool { 10272 // match: (Com32 x) 10273 // cond: 10274 // result: (MVN x) 10275 for { 10276 x := v.Args[0] 10277 v.reset(OpARM64MVN) 10278 v.AddArg(x) 10279 return true 10280 } 10281 } 10282 func rewriteValueARM64_OpCom64_0(v *Value) bool { 10283 // match: (Com64 x) 10284 // cond: 10285 // result: (MVN x) 10286 for { 10287 x := v.Args[0] 10288 v.reset(OpARM64MVN) 10289 v.AddArg(x) 10290 return true 10291 } 10292 } 10293 func 
rewriteValueARM64_OpCom8_0(v *Value) bool { 10294 // match: (Com8 x) 10295 // cond: 10296 // result: (MVN x) 10297 for { 10298 x := v.Args[0] 10299 v.reset(OpARM64MVN) 10300 v.AddArg(x) 10301 return true 10302 } 10303 } 10304 func rewriteValueARM64_OpConst16_0(v *Value) bool { 10305 // match: (Const16 [val]) 10306 // cond: 10307 // result: (MOVDconst [val]) 10308 for { 10309 val := v.AuxInt 10310 v.reset(OpARM64MOVDconst) 10311 v.AuxInt = val 10312 return true 10313 } 10314 } 10315 func rewriteValueARM64_OpConst32_0(v *Value) bool { 10316 // match: (Const32 [val]) 10317 // cond: 10318 // result: (MOVDconst [val]) 10319 for { 10320 val := v.AuxInt 10321 v.reset(OpARM64MOVDconst) 10322 v.AuxInt = val 10323 return true 10324 } 10325 } 10326 func rewriteValueARM64_OpConst32F_0(v *Value) bool { 10327 // match: (Const32F [val]) 10328 // cond: 10329 // result: (FMOVSconst [val]) 10330 for { 10331 val := v.AuxInt 10332 v.reset(OpARM64FMOVSconst) 10333 v.AuxInt = val 10334 return true 10335 } 10336 } 10337 func rewriteValueARM64_OpConst64_0(v *Value) bool { 10338 // match: (Const64 [val]) 10339 // cond: 10340 // result: (MOVDconst [val]) 10341 for { 10342 val := v.AuxInt 10343 v.reset(OpARM64MOVDconst) 10344 v.AuxInt = val 10345 return true 10346 } 10347 } 10348 func rewriteValueARM64_OpConst64F_0(v *Value) bool { 10349 // match: (Const64F [val]) 10350 // cond: 10351 // result: (FMOVDconst [val]) 10352 for { 10353 val := v.AuxInt 10354 v.reset(OpARM64FMOVDconst) 10355 v.AuxInt = val 10356 return true 10357 } 10358 } 10359 func rewriteValueARM64_OpConst8_0(v *Value) bool { 10360 // match: (Const8 [val]) 10361 // cond: 10362 // result: (MOVDconst [val]) 10363 for { 10364 val := v.AuxInt 10365 v.reset(OpARM64MOVDconst) 10366 v.AuxInt = val 10367 return true 10368 } 10369 } 10370 func rewriteValueARM64_OpConstBool_0(v *Value) bool { 10371 // match: (ConstBool [b]) 10372 // cond: 10373 // result: (MOVDconst [b]) 10374 for { 10375 b := v.AuxInt 10376 v.reset(OpARM64MOVDconst) 
10377 v.AuxInt = b 10378 return true 10379 } 10380 } 10381 func rewriteValueARM64_OpConstNil_0(v *Value) bool { 10382 // match: (ConstNil) 10383 // cond: 10384 // result: (MOVDconst [0]) 10385 for { 10386 v.reset(OpARM64MOVDconst) 10387 v.AuxInt = 0 10388 return true 10389 } 10390 } 10391 func rewriteValueARM64_OpConvert_0(v *Value) bool { 10392 // match: (Convert x mem) 10393 // cond: 10394 // result: (MOVDconvert x mem) 10395 for { 10396 x := v.Args[0] 10397 mem := v.Args[1] 10398 v.reset(OpARM64MOVDconvert) 10399 v.AddArg(x) 10400 v.AddArg(mem) 10401 return true 10402 } 10403 } 10404 func rewriteValueARM64_OpCtz32_0(v *Value) bool { 10405 b := v.Block 10406 _ = b 10407 // match: (Ctz32 <t> x) 10408 // cond: 10409 // result: (CLZW (RBITW <t> x)) 10410 for { 10411 t := v.Type 10412 x := v.Args[0] 10413 v.reset(OpARM64CLZW) 10414 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 10415 v0.AddArg(x) 10416 v.AddArg(v0) 10417 return true 10418 } 10419 } 10420 func rewriteValueARM64_OpCtz64_0(v *Value) bool { 10421 b := v.Block 10422 _ = b 10423 // match: (Ctz64 <t> x) 10424 // cond: 10425 // result: (CLZ (RBIT <t> x)) 10426 for { 10427 t := v.Type 10428 x := v.Args[0] 10429 v.reset(OpARM64CLZ) 10430 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 10431 v0.AddArg(x) 10432 v.AddArg(v0) 10433 return true 10434 } 10435 } 10436 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool { 10437 // match: (Cvt32Fto32 x) 10438 // cond: 10439 // result: (FCVTZSSW x) 10440 for { 10441 x := v.Args[0] 10442 v.reset(OpARM64FCVTZSSW) 10443 v.AddArg(x) 10444 return true 10445 } 10446 } 10447 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool { 10448 // match: (Cvt32Fto32U x) 10449 // cond: 10450 // result: (FCVTZUSW x) 10451 for { 10452 x := v.Args[0] 10453 v.reset(OpARM64FCVTZUSW) 10454 v.AddArg(x) 10455 return true 10456 } 10457 } 10458 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool { 10459 // match: (Cvt32Fto64 x) 10460 // cond: 10461 // result: (FCVTZSS x) 10462 for { 10463 x := v.Args[0] 
10464 v.reset(OpARM64FCVTZSS) 10465 v.AddArg(x) 10466 return true 10467 } 10468 } 10469 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool { 10470 // match: (Cvt32Fto64F x) 10471 // cond: 10472 // result: (FCVTSD x) 10473 for { 10474 x := v.Args[0] 10475 v.reset(OpARM64FCVTSD) 10476 v.AddArg(x) 10477 return true 10478 } 10479 } 10480 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool { 10481 // match: (Cvt32Fto64U x) 10482 // cond: 10483 // result: (FCVTZUS x) 10484 for { 10485 x := v.Args[0] 10486 v.reset(OpARM64FCVTZUS) 10487 v.AddArg(x) 10488 return true 10489 } 10490 } 10491 func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool { 10492 // match: (Cvt32Uto32F x) 10493 // cond: 10494 // result: (UCVTFWS x) 10495 for { 10496 x := v.Args[0] 10497 v.reset(OpARM64UCVTFWS) 10498 v.AddArg(x) 10499 return true 10500 } 10501 } 10502 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool { 10503 // match: (Cvt32Uto64F x) 10504 // cond: 10505 // result: (UCVTFWD x) 10506 for { 10507 x := v.Args[0] 10508 v.reset(OpARM64UCVTFWD) 10509 v.AddArg(x) 10510 return true 10511 } 10512 } 10513 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool { 10514 // match: (Cvt32to32F x) 10515 // cond: 10516 // result: (SCVTFWS x) 10517 for { 10518 x := v.Args[0] 10519 v.reset(OpARM64SCVTFWS) 10520 v.AddArg(x) 10521 return true 10522 } 10523 } 10524 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool { 10525 // match: (Cvt32to64F x) 10526 // cond: 10527 // result: (SCVTFWD x) 10528 for { 10529 x := v.Args[0] 10530 v.reset(OpARM64SCVTFWD) 10531 v.AddArg(x) 10532 return true 10533 } 10534 } 10535 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool { 10536 // match: (Cvt64Fto32 x) 10537 // cond: 10538 // result: (FCVTZSDW x) 10539 for { 10540 x := v.Args[0] 10541 v.reset(OpARM64FCVTZSDW) 10542 v.AddArg(x) 10543 return true 10544 } 10545 } 10546 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool { 10547 // match: (Cvt64Fto32F x) 10548 // cond: 10549 // result: (FCVTDS x) 10550 for { 
10551 x := v.Args[0] 10552 v.reset(OpARM64FCVTDS) 10553 v.AddArg(x) 10554 return true 10555 } 10556 } 10557 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool { 10558 // match: (Cvt64Fto32U x) 10559 // cond: 10560 // result: (FCVTZUDW x) 10561 for { 10562 x := v.Args[0] 10563 v.reset(OpARM64FCVTZUDW) 10564 v.AddArg(x) 10565 return true 10566 } 10567 } 10568 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool { 10569 // match: (Cvt64Fto64 x) 10570 // cond: 10571 // result: (FCVTZSD x) 10572 for { 10573 x := v.Args[0] 10574 v.reset(OpARM64FCVTZSD) 10575 v.AddArg(x) 10576 return true 10577 } 10578 } 10579 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool { 10580 // match: (Cvt64Fto64U x) 10581 // cond: 10582 // result: (FCVTZUD x) 10583 for { 10584 x := v.Args[0] 10585 v.reset(OpARM64FCVTZUD) 10586 v.AddArg(x) 10587 return true 10588 } 10589 } 10590 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool { 10591 // match: (Cvt64Uto32F x) 10592 // cond: 10593 // result: (UCVTFS x) 10594 for { 10595 x := v.Args[0] 10596 v.reset(OpARM64UCVTFS) 10597 v.AddArg(x) 10598 return true 10599 } 10600 } 10601 func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool { 10602 // match: (Cvt64Uto64F x) 10603 // cond: 10604 // result: (UCVTFD x) 10605 for { 10606 x := v.Args[0] 10607 v.reset(OpARM64UCVTFD) 10608 v.AddArg(x) 10609 return true 10610 } 10611 } 10612 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool { 10613 // match: (Cvt64to32F x) 10614 // cond: 10615 // result: (SCVTFS x) 10616 for { 10617 x := v.Args[0] 10618 v.reset(OpARM64SCVTFS) 10619 v.AddArg(x) 10620 return true 10621 } 10622 } 10623 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool { 10624 // match: (Cvt64to64F x) 10625 // cond: 10626 // result: (SCVTFD x) 10627 for { 10628 x := v.Args[0] 10629 v.reset(OpARM64SCVTFD) 10630 v.AddArg(x) 10631 return true 10632 } 10633 } 10634 func rewriteValueARM64_OpDiv16_0(v *Value) bool { 10635 b := v.Block 10636 _ = b 10637 types := &b.Func.Config.Types 10638 _ = types 
10639 // match: (Div16 x y) 10640 // cond: 10641 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 10642 for { 10643 x := v.Args[0] 10644 y := v.Args[1] 10645 v.reset(OpARM64DIVW) 10646 v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 10647 v0.AddArg(x) 10648 v.AddArg(v0) 10649 v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 10650 v1.AddArg(y) 10651 v.AddArg(v1) 10652 return true 10653 } 10654 } 10655 func rewriteValueARM64_OpDiv16u_0(v *Value) bool { 10656 b := v.Block 10657 _ = b 10658 types := &b.Func.Config.Types 10659 _ = types 10660 // match: (Div16u x y) 10661 // cond: 10662 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 10663 for { 10664 x := v.Args[0] 10665 y := v.Args[1] 10666 v.reset(OpARM64UDIVW) 10667 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10668 v0.AddArg(x) 10669 v.AddArg(v0) 10670 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10671 v1.AddArg(y) 10672 v.AddArg(v1) 10673 return true 10674 } 10675 } 10676 func rewriteValueARM64_OpDiv32_0(v *Value) bool { 10677 // match: (Div32 x y) 10678 // cond: 10679 // result: (DIVW x y) 10680 for { 10681 x := v.Args[0] 10682 y := v.Args[1] 10683 v.reset(OpARM64DIVW) 10684 v.AddArg(x) 10685 v.AddArg(y) 10686 return true 10687 } 10688 } 10689 func rewriteValueARM64_OpDiv32F_0(v *Value) bool { 10690 // match: (Div32F x y) 10691 // cond: 10692 // result: (FDIVS x y) 10693 for { 10694 x := v.Args[0] 10695 y := v.Args[1] 10696 v.reset(OpARM64FDIVS) 10697 v.AddArg(x) 10698 v.AddArg(y) 10699 return true 10700 } 10701 } 10702 func rewriteValueARM64_OpDiv32u_0(v *Value) bool { 10703 // match: (Div32u x y) 10704 // cond: 10705 // result: (UDIVW x y) 10706 for { 10707 x := v.Args[0] 10708 y := v.Args[1] 10709 v.reset(OpARM64UDIVW) 10710 v.AddArg(x) 10711 v.AddArg(y) 10712 return true 10713 } 10714 } 10715 func rewriteValueARM64_OpDiv64_0(v *Value) bool { 10716 // match: (Div64 x y) 10717 // cond: 10718 // result: (DIV x y) 10719 for { 10720 x := v.Args[0] 10721 y := v.Args[1] 
10722 v.reset(OpARM64DIV) 10723 v.AddArg(x) 10724 v.AddArg(y) 10725 return true 10726 } 10727 } 10728 func rewriteValueARM64_OpDiv64F_0(v *Value) bool { 10729 // match: (Div64F x y) 10730 // cond: 10731 // result: (FDIVD x y) 10732 for { 10733 x := v.Args[0] 10734 y := v.Args[1] 10735 v.reset(OpARM64FDIVD) 10736 v.AddArg(x) 10737 v.AddArg(y) 10738 return true 10739 } 10740 } 10741 func rewriteValueARM64_OpDiv64u_0(v *Value) bool { 10742 // match: (Div64u x y) 10743 // cond: 10744 // result: (UDIV x y) 10745 for { 10746 x := v.Args[0] 10747 y := v.Args[1] 10748 v.reset(OpARM64UDIV) 10749 v.AddArg(x) 10750 v.AddArg(y) 10751 return true 10752 } 10753 } 10754 func rewriteValueARM64_OpDiv8_0(v *Value) bool { 10755 b := v.Block 10756 _ = b 10757 types := &b.Func.Config.Types 10758 _ = types 10759 // match: (Div8 x y) 10760 // cond: 10761 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 10762 for { 10763 x := v.Args[0] 10764 y := v.Args[1] 10765 v.reset(OpARM64DIVW) 10766 v0 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 10767 v0.AddArg(x) 10768 v.AddArg(v0) 10769 v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 10770 v1.AddArg(y) 10771 v.AddArg(v1) 10772 return true 10773 } 10774 } 10775 func rewriteValueARM64_OpDiv8u_0(v *Value) bool { 10776 b := v.Block 10777 _ = b 10778 types := &b.Func.Config.Types 10779 _ = types 10780 // match: (Div8u x y) 10781 // cond: 10782 // result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y)) 10783 for { 10784 x := v.Args[0] 10785 y := v.Args[1] 10786 v.reset(OpARM64UDIVW) 10787 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 10788 v0.AddArg(x) 10789 v.AddArg(v0) 10790 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 10791 v1.AddArg(y) 10792 v.AddArg(v1) 10793 return true 10794 } 10795 } 10796 func rewriteValueARM64_OpEq16_0(v *Value) bool { 10797 b := v.Block 10798 _ = b 10799 types := &b.Func.Config.Types 10800 _ = types 10801 // match: (Eq16 x y) 10802 // cond: 10803 // result: (Equal (CMPW (ZeroExt16to32 x) 
(ZeroExt16to32 y))) 10804 for { 10805 x := v.Args[0] 10806 y := v.Args[1] 10807 v.reset(OpARM64Equal) 10808 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10809 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10810 v1.AddArg(x) 10811 v0.AddArg(v1) 10812 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10813 v2.AddArg(y) 10814 v0.AddArg(v2) 10815 v.AddArg(v0) 10816 return true 10817 } 10818 } 10819 func rewriteValueARM64_OpEq32_0(v *Value) bool { 10820 b := v.Block 10821 _ = b 10822 // match: (Eq32 x y) 10823 // cond: 10824 // result: (Equal (CMPW x y)) 10825 for { 10826 x := v.Args[0] 10827 y := v.Args[1] 10828 v.reset(OpARM64Equal) 10829 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10830 v0.AddArg(x) 10831 v0.AddArg(y) 10832 v.AddArg(v0) 10833 return true 10834 } 10835 } 10836 func rewriteValueARM64_OpEq32F_0(v *Value) bool { 10837 b := v.Block 10838 _ = b 10839 // match: (Eq32F x y) 10840 // cond: 10841 // result: (Equal (FCMPS x y)) 10842 for { 10843 x := v.Args[0] 10844 y := v.Args[1] 10845 v.reset(OpARM64Equal) 10846 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 10847 v0.AddArg(x) 10848 v0.AddArg(y) 10849 v.AddArg(v0) 10850 return true 10851 } 10852 } 10853 func rewriteValueARM64_OpEq64_0(v *Value) bool { 10854 b := v.Block 10855 _ = b 10856 // match: (Eq64 x y) 10857 // cond: 10858 // result: (Equal (CMP x y)) 10859 for { 10860 x := v.Args[0] 10861 y := v.Args[1] 10862 v.reset(OpARM64Equal) 10863 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10864 v0.AddArg(x) 10865 v0.AddArg(y) 10866 v.AddArg(v0) 10867 return true 10868 } 10869 } 10870 func rewriteValueARM64_OpEq64F_0(v *Value) bool { 10871 b := v.Block 10872 _ = b 10873 // match: (Eq64F x y) 10874 // cond: 10875 // result: (Equal (FCMPD x y)) 10876 for { 10877 x := v.Args[0] 10878 y := v.Args[1] 10879 v.reset(OpARM64Equal) 10880 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 10881 v0.AddArg(x) 10882 v0.AddArg(y) 10883 v.AddArg(v0) 10884 return true 10885 } 10886 } 10887 func 
rewriteValueARM64_OpEq8_0(v *Value) bool { 10888 b := v.Block 10889 _ = b 10890 types := &b.Func.Config.Types 10891 _ = types 10892 // match: (Eq8 x y) 10893 // cond: 10894 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 10895 for { 10896 x := v.Args[0] 10897 y := v.Args[1] 10898 v.reset(OpARM64Equal) 10899 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10900 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 10901 v1.AddArg(x) 10902 v0.AddArg(v1) 10903 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 10904 v2.AddArg(y) 10905 v0.AddArg(v2) 10906 v.AddArg(v0) 10907 return true 10908 } 10909 } 10910 func rewriteValueARM64_OpEqB_0(v *Value) bool { 10911 b := v.Block 10912 _ = b 10913 types := &b.Func.Config.Types 10914 _ = types 10915 // match: (EqB x y) 10916 // cond: 10917 // result: (XOR (MOVDconst [1]) (XOR <types.Bool> x y)) 10918 for { 10919 x := v.Args[0] 10920 y := v.Args[1] 10921 v.reset(OpARM64XOR) 10922 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64) 10923 v0.AuxInt = 1 10924 v.AddArg(v0) 10925 v1 := b.NewValue0(v.Pos, OpARM64XOR, types.Bool) 10926 v1.AddArg(x) 10927 v1.AddArg(y) 10928 v.AddArg(v1) 10929 return true 10930 } 10931 } 10932 func rewriteValueARM64_OpEqPtr_0(v *Value) bool { 10933 b := v.Block 10934 _ = b 10935 // match: (EqPtr x y) 10936 // cond: 10937 // result: (Equal (CMP x y)) 10938 for { 10939 x := v.Args[0] 10940 y := v.Args[1] 10941 v.reset(OpARM64Equal) 10942 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 10943 v0.AddArg(x) 10944 v0.AddArg(y) 10945 v.AddArg(v0) 10946 return true 10947 } 10948 } 10949 func rewriteValueARM64_OpGeq16_0(v *Value) bool { 10950 b := v.Block 10951 _ = b 10952 types := &b.Func.Config.Types 10953 _ = types 10954 // match: (Geq16 x y) 10955 // cond: 10956 // result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 10957 for { 10958 x := v.Args[0] 10959 y := v.Args[1] 10960 v.reset(OpARM64GreaterEqual) 10961 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10962 v1 := 
b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 10963 v1.AddArg(x) 10964 v0.AddArg(v1) 10965 v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 10966 v2.AddArg(y) 10967 v0.AddArg(v2) 10968 v.AddArg(v0) 10969 return true 10970 } 10971 } 10972 func rewriteValueARM64_OpGeq16U_0(v *Value) bool { 10973 b := v.Block 10974 _ = b 10975 types := &b.Func.Config.Types 10976 _ = types 10977 // match: (Geq16U x y) 10978 // cond: 10979 // result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 10980 for { 10981 x := v.Args[0] 10982 y := v.Args[1] 10983 v.reset(OpARM64GreaterEqualU) 10984 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 10985 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10986 v1.AddArg(x) 10987 v0.AddArg(v1) 10988 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 10989 v2.AddArg(y) 10990 v0.AddArg(v2) 10991 v.AddArg(v0) 10992 return true 10993 } 10994 } 10995 func rewriteValueARM64_OpGeq32_0(v *Value) bool { 10996 b := v.Block 10997 _ = b 10998 // match: (Geq32 x y) 10999 // cond: 11000 // result: (GreaterEqual (CMPW x y)) 11001 for { 11002 x := v.Args[0] 11003 y := v.Args[1] 11004 v.reset(OpARM64GreaterEqual) 11005 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11006 v0.AddArg(x) 11007 v0.AddArg(y) 11008 v.AddArg(v0) 11009 return true 11010 } 11011 } 11012 func rewriteValueARM64_OpGeq32F_0(v *Value) bool { 11013 b := v.Block 11014 _ = b 11015 // match: (Geq32F x y) 11016 // cond: 11017 // result: (GreaterEqual (FCMPS x y)) 11018 for { 11019 x := v.Args[0] 11020 y := v.Args[1] 11021 v.reset(OpARM64GreaterEqual) 11022 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 11023 v0.AddArg(x) 11024 v0.AddArg(y) 11025 v.AddArg(v0) 11026 return true 11027 } 11028 } 11029 func rewriteValueARM64_OpGeq32U_0(v *Value) bool { 11030 b := v.Block 11031 _ = b 11032 // match: (Geq32U x y) 11033 // cond: 11034 // result: (GreaterEqualU (CMPW x y)) 11035 for { 11036 x := v.Args[0] 11037 y := v.Args[1] 11038 v.reset(OpARM64GreaterEqualU) 11039 v0 := 
b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11040 v0.AddArg(x) 11041 v0.AddArg(y) 11042 v.AddArg(v0) 11043 return true 11044 } 11045 } 11046 func rewriteValueARM64_OpGeq64_0(v *Value) bool { 11047 b := v.Block 11048 _ = b 11049 // match: (Geq64 x y) 11050 // cond: 11051 // result: (GreaterEqual (CMP x y)) 11052 for { 11053 x := v.Args[0] 11054 y := v.Args[1] 11055 v.reset(OpARM64GreaterEqual) 11056 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11057 v0.AddArg(x) 11058 v0.AddArg(y) 11059 v.AddArg(v0) 11060 return true 11061 } 11062 } 11063 func rewriteValueARM64_OpGeq64F_0(v *Value) bool { 11064 b := v.Block 11065 _ = b 11066 // match: (Geq64F x y) 11067 // cond: 11068 // result: (GreaterEqual (FCMPD x y)) 11069 for { 11070 x := v.Args[0] 11071 y := v.Args[1] 11072 v.reset(OpARM64GreaterEqual) 11073 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 11074 v0.AddArg(x) 11075 v0.AddArg(y) 11076 v.AddArg(v0) 11077 return true 11078 } 11079 } 11080 func rewriteValueARM64_OpGeq64U_0(v *Value) bool { 11081 b := v.Block 11082 _ = b 11083 // match: (Geq64U x y) 11084 // cond: 11085 // result: (GreaterEqualU (CMP x y)) 11086 for { 11087 x := v.Args[0] 11088 y := v.Args[1] 11089 v.reset(OpARM64GreaterEqualU) 11090 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11091 v0.AddArg(x) 11092 v0.AddArg(y) 11093 v.AddArg(v0) 11094 return true 11095 } 11096 } 11097 func rewriteValueARM64_OpGeq8_0(v *Value) bool { 11098 b := v.Block 11099 _ = b 11100 types := &b.Func.Config.Types 11101 _ = types 11102 // match: (Geq8 x y) 11103 // cond: 11104 // result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11105 for { 11106 x := v.Args[0] 11107 y := v.Args[1] 11108 v.reset(OpARM64GreaterEqual) 11109 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11110 v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11111 v1.AddArg(x) 11112 v0.AddArg(v1) 11113 v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11114 v2.AddArg(y) 11115 v0.AddArg(v2) 11116 v.AddArg(v0) 11117 return true 11118 
} 11119 } 11120 func rewriteValueARM64_OpGeq8U_0(v *Value) bool { 11121 b := v.Block 11122 _ = b 11123 types := &b.Func.Config.Types 11124 _ = types 11125 // match: (Geq8U x y) 11126 // cond: 11127 // result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11128 for { 11129 x := v.Args[0] 11130 y := v.Args[1] 11131 v.reset(OpARM64GreaterEqualU) 11132 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11133 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11134 v1.AddArg(x) 11135 v0.AddArg(v1) 11136 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11137 v2.AddArg(y) 11138 v0.AddArg(v2) 11139 v.AddArg(v0) 11140 return true 11141 } 11142 } 11143 func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool { 11144 // match: (GetClosurePtr) 11145 // cond: 11146 // result: (LoweredGetClosurePtr) 11147 for { 11148 v.reset(OpARM64LoweredGetClosurePtr) 11149 return true 11150 } 11151 } 11152 func rewriteValueARM64_OpGreater16_0(v *Value) bool { 11153 b := v.Block 11154 _ = b 11155 types := &b.Func.Config.Types 11156 _ = types 11157 // match: (Greater16 x y) 11158 // cond: 11159 // result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 11160 for { 11161 x := v.Args[0] 11162 y := v.Args[1] 11163 v.reset(OpARM64GreaterThan) 11164 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11165 v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 11166 v1.AddArg(x) 11167 v0.AddArg(v1) 11168 v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 11169 v2.AddArg(y) 11170 v0.AddArg(v2) 11171 v.AddArg(v0) 11172 return true 11173 } 11174 } 11175 func rewriteValueARM64_OpGreater16U_0(v *Value) bool { 11176 b := v.Block 11177 _ = b 11178 types := &b.Func.Config.Types 11179 _ = types 11180 // match: (Greater16U x y) 11181 // cond: 11182 // result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11183 for { 11184 x := v.Args[0] 11185 y := v.Args[1] 11186 v.reset(OpARM64GreaterThanU) 11187 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11188 v1 := 
b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 11189 v1.AddArg(x) 11190 v0.AddArg(v1) 11191 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32) 11192 v2.AddArg(y) 11193 v0.AddArg(v2) 11194 v.AddArg(v0) 11195 return true 11196 } 11197 } 11198 func rewriteValueARM64_OpGreater32_0(v *Value) bool { 11199 b := v.Block 11200 _ = b 11201 // match: (Greater32 x y) 11202 // cond: 11203 // result: (GreaterThan (CMPW x y)) 11204 for { 11205 x := v.Args[0] 11206 y := v.Args[1] 11207 v.reset(OpARM64GreaterThan) 11208 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11209 v0.AddArg(x) 11210 v0.AddArg(y) 11211 v.AddArg(v0) 11212 return true 11213 } 11214 } 11215 func rewriteValueARM64_OpGreater32F_0(v *Value) bool { 11216 b := v.Block 11217 _ = b 11218 // match: (Greater32F x y) 11219 // cond: 11220 // result: (GreaterThan (FCMPS x y)) 11221 for { 11222 x := v.Args[0] 11223 y := v.Args[1] 11224 v.reset(OpARM64GreaterThan) 11225 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags) 11226 v0.AddArg(x) 11227 v0.AddArg(y) 11228 v.AddArg(v0) 11229 return true 11230 } 11231 } 11232 func rewriteValueARM64_OpGreater32U_0(v *Value) bool { 11233 b := v.Block 11234 _ = b 11235 // match: (Greater32U x y) 11236 // cond: 11237 // result: (GreaterThanU (CMPW x y)) 11238 for { 11239 x := v.Args[0] 11240 y := v.Args[1] 11241 v.reset(OpARM64GreaterThanU) 11242 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11243 v0.AddArg(x) 11244 v0.AddArg(y) 11245 v.AddArg(v0) 11246 return true 11247 } 11248 } 11249 func rewriteValueARM64_OpGreater64_0(v *Value) bool { 11250 b := v.Block 11251 _ = b 11252 // match: (Greater64 x y) 11253 // cond: 11254 // result: (GreaterThan (CMP x y)) 11255 for { 11256 x := v.Args[0] 11257 y := v.Args[1] 11258 v.reset(OpARM64GreaterThan) 11259 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11260 v0.AddArg(x) 11261 v0.AddArg(y) 11262 v.AddArg(v0) 11263 return true 11264 } 11265 } 11266 func rewriteValueARM64_OpGreater64F_0(v *Value) bool { 11267 b := v.Block 11268 _ = b 
11269 // match: (Greater64F x y) 11270 // cond: 11271 // result: (GreaterThan (FCMPD x y)) 11272 for { 11273 x := v.Args[0] 11274 y := v.Args[1] 11275 v.reset(OpARM64GreaterThan) 11276 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags) 11277 v0.AddArg(x) 11278 v0.AddArg(y) 11279 v.AddArg(v0) 11280 return true 11281 } 11282 } 11283 func rewriteValueARM64_OpGreater64U_0(v *Value) bool { 11284 b := v.Block 11285 _ = b 11286 // match: (Greater64U x y) 11287 // cond: 11288 // result: (GreaterThanU (CMP x y)) 11289 for { 11290 x := v.Args[0] 11291 y := v.Args[1] 11292 v.reset(OpARM64GreaterThanU) 11293 v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags) 11294 v0.AddArg(x) 11295 v0.AddArg(y) 11296 v.AddArg(v0) 11297 return true 11298 } 11299 } 11300 func rewriteValueARM64_OpGreater8_0(v *Value) bool { 11301 b := v.Block 11302 _ = b 11303 types := &b.Func.Config.Types 11304 _ = types 11305 // match: (Greater8 x y) 11306 // cond: 11307 // result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11308 for { 11309 x := v.Args[0] 11310 y := v.Args[1] 11311 v.reset(OpARM64GreaterThan) 11312 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11313 v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11314 v1.AddArg(x) 11315 v0.AddArg(v1) 11316 v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11317 v2.AddArg(y) 11318 v0.AddArg(v2) 11319 v.AddArg(v0) 11320 return true 11321 } 11322 } 11323 func rewriteValueARM64_OpGreater8U_0(v *Value) bool { 11324 b := v.Block 11325 _ = b 11326 types := &b.Func.Config.Types 11327 _ = types 11328 // match: (Greater8U x y) 11329 // cond: 11330 // result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11331 for { 11332 x := v.Args[0] 11333 y := v.Args[1] 11334 v.reset(OpARM64GreaterThanU) 11335 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11336 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11337 v1.AddArg(x) 11338 v0.AddArg(v1) 11339 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11340 v2.AddArg(y) 11341 
		// (continuation: tail of rewriteValueARM64_OpGreater8U_0, whose head lies on an earlier line)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul32_0 lowers Hmul32 (high word of a 32x32 signed
// multiply) to SRAconst[32] applied to the full 64-bit MULL product.
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <types.Int64> x y) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64MULL, types.Int64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		// Generated matcher idiom: the for{} never loops — it exists so rules
		// with conditions can `break` to the next rule; this rule is unconditional.
		return true
	}
}

// rewriteValueARM64_OpHmul32u_0 lowers Hmul32u to SRAconst[32] of the
// 64-bit unsigned product (UMULL).
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <types.UInt64> x y) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, types.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul64_0 lowers Hmul64 directly to the ARM64
// signed high-multiply instruction MULH.
func rewriteValueARM64_OpHmul64_0(v *Value) bool {
	// match: (Hmul64 x y)
	// cond:
	// result: (MULH x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpHmul64u_0 lowers Hmul64u directly to the ARM64
// unsigned high-multiply instruction UMULH.
func rewriteValueARM64_OpHmul64u_0(v *Value) bool {
	// match: (Hmul64u x y)
	// cond:
	// result: (UMULH x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpInterCall_0 lowers a generic interface call to the
// ARM64 CALLinter op, carrying the argument width through AuxInt.
func rewriteValueARM64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpIsInBounds_0 lowers IsInBounds to an unsigned
// compare: idx is in bounds iff idx <u len.
func rewriteValueARM64_OpIsInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (LessThanU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpIsNonNil_0 lowers IsNonNil to a compare of the
// pointer against the constant 0.
func rewriteValueARM64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpIsSliceInBounds_0 lowers IsSliceInBounds: a slice
// bound is valid iff idx <=u len (equality allowed, unlike IsInBounds).
func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (LessEqualU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq16_0 lowers a signed 16-bit <=: both operands are
// sign-extended to 32 bits and compared with CMPW.
func rewriteValueARM64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq16U_0 lowers an unsigned 16-bit <= via
// zero-extension and an unsigned flag test. (Body continues on a later line.)
func rewriteValueARM64_OpLeq16U_0(v *Value) bool {
	b :=
	// (continuation: tail of rewriteValueARM64_OpLeq16U_0, whose head lies on an earlier line)
	v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32_0 lowers a signed 32-bit <= to a 32-bit
// compare (CMPW) feeding the LessEqual flag test.
func rewriteValueARM64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32 x y)
	// cond:
	// result: (LessEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32F_0 lowers a float32 <=. Note the operand swap:
// x <= y becomes GreaterEqual(FCMPS y x) — presumably so the unordered
// (NaN) flag outcome yields false; the swap is what the generated rule encodes.
func rewriteValueARM64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32U_0 lowers an unsigned 32-bit <= to CMPW with
// the unsigned LessEqualU flag test.
func rewriteValueARM64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (LessEqualU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64_0 lowers a signed 64-bit <= to a full-width
// CMP feeding LessEqual.
func rewriteValueARM64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64 x y)
	// cond:
	// result: (LessEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64F_0 lowers a float64 <= with the same operand
// swap as Leq32F: x <= y becomes GreaterEqual(FCMPD y x).
func rewriteValueARM64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64U_0 lowers an unsigned 64-bit <= to CMP with
// the unsigned LessEqualU flag test.
func rewriteValueARM64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64U x y)
	// cond:
	// result: (LessEqualU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq8_0 lowers a signed 8-bit <=: both operands are
// sign-extended to 32 bits and compared with CMPW.
func rewriteValueARM64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq8U_0 lowers an unsigned 8-bit <= via
// zero-extension. (Body continues on a later line.)
func rewriteValueARM64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		// (continuation: tail of rewriteValueARM64_OpLeq8U_0, whose head lies on an earlier line)
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess16_0 lowers a signed 16-bit <: both operands are
// sign-extended to 32 bits and compared with CMPW feeding LessThan.
func rewriteValueARM64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		// Generated matcher idiom: the for{} never loops; this rule is unconditional.
		return true
	}
}

// rewriteValueARM64_OpLess16U_0 lowers an unsigned 16-bit < via
// zero-extension and the unsigned LessThanU flag test.
func rewriteValueARM64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32_0 lowers a signed 32-bit < to CMPW feeding
// the LessThan flag test.
func rewriteValueARM64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (LessThan (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32F_0 lowers a float32 <. Note the operand swap:
// x < y becomes GreaterThan(FCMPS y x) — presumably so the unordered
// (NaN) flag outcome yields false; the swap is what the generated rule encodes.
func rewriteValueARM64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (GreaterThan (FCMPS y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32U_0 lowers an unsigned 32-bit < to CMPW with
// the unsigned LessThanU flag test.
func rewriteValueARM64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (LessThanU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64_0 lowers a signed 64-bit < to a full-width
// CMP feeding LessThan.
func rewriteValueARM64_OpLess64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64 x y)
	// cond:
	// result: (LessThan (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64F_0 lowers a float64 < with the same operand
// swap as Less32F: x < y becomes GreaterThan(FCMPD y x).
func rewriteValueARM64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (GreaterThan (FCMPD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64U_0 lowers an unsigned 64-bit < to CMP with
// the unsigned LessThanU flag test. (Body continues on a later line.)
func rewriteValueARM64_OpLess64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64U x y)
	// cond:
	// result: (LessThanU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
v0.AddArg(x) 11814 v0.AddArg(y) 11815 v.AddArg(v0) 11816 return true 11817 } 11818 } 11819 func rewriteValueARM64_OpLess8_0(v *Value) bool { 11820 b := v.Block 11821 _ = b 11822 types := &b.Func.Config.Types 11823 _ = types 11824 // match: (Less8 x y) 11825 // cond: 11826 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11827 for { 11828 x := v.Args[0] 11829 y := v.Args[1] 11830 v.reset(OpARM64LessThan) 11831 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11832 v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11833 v1.AddArg(x) 11834 v0.AddArg(v1) 11835 v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 11836 v2.AddArg(y) 11837 v0.AddArg(v2) 11838 v.AddArg(v0) 11839 return true 11840 } 11841 } 11842 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 11843 b := v.Block 11844 _ = b 11845 types := &b.Func.Config.Types 11846 _ = types 11847 // match: (Less8U x y) 11848 // cond: 11849 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11850 for { 11851 x := v.Args[0] 11852 y := v.Args[1] 11853 v.reset(OpARM64LessThanU) 11854 v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags) 11855 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11856 v1.AddArg(x) 11857 v0.AddArg(v1) 11858 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 11859 v2.AddArg(y) 11860 v0.AddArg(v2) 11861 v.AddArg(v0) 11862 return true 11863 } 11864 } 11865 func rewriteValueARM64_OpLoad_0(v *Value) bool { 11866 // match: (Load <t> ptr mem) 11867 // cond: t.IsBoolean() 11868 // result: (MOVBUload ptr mem) 11869 for { 11870 t := v.Type 11871 ptr := v.Args[0] 11872 mem := v.Args[1] 11873 if !(t.IsBoolean()) { 11874 break 11875 } 11876 v.reset(OpARM64MOVBUload) 11877 v.AddArg(ptr) 11878 v.AddArg(mem) 11879 return true 11880 } 11881 // match: (Load <t> ptr mem) 11882 // cond: (is8BitInt(t) && isSigned(t)) 11883 // result: (MOVBload ptr mem) 11884 for { 11885 t := v.Type 11886 ptr := v.Args[0] 11887 mem := v.Args[1] 11888 if !(is8BitInt(t) && isSigned(t)) { 11889 break 
11890 } 11891 v.reset(OpARM64MOVBload) 11892 v.AddArg(ptr) 11893 v.AddArg(mem) 11894 return true 11895 } 11896 // match: (Load <t> ptr mem) 11897 // cond: (is8BitInt(t) && !isSigned(t)) 11898 // result: (MOVBUload ptr mem) 11899 for { 11900 t := v.Type 11901 ptr := v.Args[0] 11902 mem := v.Args[1] 11903 if !(is8BitInt(t) && !isSigned(t)) { 11904 break 11905 } 11906 v.reset(OpARM64MOVBUload) 11907 v.AddArg(ptr) 11908 v.AddArg(mem) 11909 return true 11910 } 11911 // match: (Load <t> ptr mem) 11912 // cond: (is16BitInt(t) && isSigned(t)) 11913 // result: (MOVHload ptr mem) 11914 for { 11915 t := v.Type 11916 ptr := v.Args[0] 11917 mem := v.Args[1] 11918 if !(is16BitInt(t) && isSigned(t)) { 11919 break 11920 } 11921 v.reset(OpARM64MOVHload) 11922 v.AddArg(ptr) 11923 v.AddArg(mem) 11924 return true 11925 } 11926 // match: (Load <t> ptr mem) 11927 // cond: (is16BitInt(t) && !isSigned(t)) 11928 // result: (MOVHUload ptr mem) 11929 for { 11930 t := v.Type 11931 ptr := v.Args[0] 11932 mem := v.Args[1] 11933 if !(is16BitInt(t) && !isSigned(t)) { 11934 break 11935 } 11936 v.reset(OpARM64MOVHUload) 11937 v.AddArg(ptr) 11938 v.AddArg(mem) 11939 return true 11940 } 11941 // match: (Load <t> ptr mem) 11942 // cond: (is32BitInt(t) && isSigned(t)) 11943 // result: (MOVWload ptr mem) 11944 for { 11945 t := v.Type 11946 ptr := v.Args[0] 11947 mem := v.Args[1] 11948 if !(is32BitInt(t) && isSigned(t)) { 11949 break 11950 } 11951 v.reset(OpARM64MOVWload) 11952 v.AddArg(ptr) 11953 v.AddArg(mem) 11954 return true 11955 } 11956 // match: (Load <t> ptr mem) 11957 // cond: (is32BitInt(t) && !isSigned(t)) 11958 // result: (MOVWUload ptr mem) 11959 for { 11960 t := v.Type 11961 ptr := v.Args[0] 11962 mem := v.Args[1] 11963 if !(is32BitInt(t) && !isSigned(t)) { 11964 break 11965 } 11966 v.reset(OpARM64MOVWUload) 11967 v.AddArg(ptr) 11968 v.AddArg(mem) 11969 return true 11970 } 11971 // match: (Load <t> ptr mem) 11972 // cond: (is64BitInt(t) || isPtr(t)) 11973 // result: (MOVDload ptr mem) 11974 
for { 11975 t := v.Type 11976 ptr := v.Args[0] 11977 mem := v.Args[1] 11978 if !(is64BitInt(t) || isPtr(t)) { 11979 break 11980 } 11981 v.reset(OpARM64MOVDload) 11982 v.AddArg(ptr) 11983 v.AddArg(mem) 11984 return true 11985 } 11986 // match: (Load <t> ptr mem) 11987 // cond: is32BitFloat(t) 11988 // result: (FMOVSload ptr mem) 11989 for { 11990 t := v.Type 11991 ptr := v.Args[0] 11992 mem := v.Args[1] 11993 if !(is32BitFloat(t)) { 11994 break 11995 } 11996 v.reset(OpARM64FMOVSload) 11997 v.AddArg(ptr) 11998 v.AddArg(mem) 11999 return true 12000 } 12001 // match: (Load <t> ptr mem) 12002 // cond: is64BitFloat(t) 12003 // result: (FMOVDload ptr mem) 12004 for { 12005 t := v.Type 12006 ptr := v.Args[0] 12007 mem := v.Args[1] 12008 if !(is64BitFloat(t)) { 12009 break 12010 } 12011 v.reset(OpARM64FMOVDload) 12012 v.AddArg(ptr) 12013 v.AddArg(mem) 12014 return true 12015 } 12016 return false 12017 } 12018 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 12019 b := v.Block 12020 _ = b 12021 types := &b.Func.Config.Types 12022 _ = types 12023 // match: (Lsh16x16 <t> x y) 12024 // cond: 12025 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12026 for { 12027 t := v.Type 12028 x := v.Args[0] 12029 y := v.Args[1] 12030 v.reset(OpARM64CSELULT) 12031 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12032 v0.AddArg(x) 12033 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12034 v1.AddArg(y) 12035 v0.AddArg(v1) 12036 v.AddArg(v0) 12037 v2 := b.NewValue0(v.Pos, OpConst64, t) 12038 v2.AuxInt = 0 12039 v.AddArg(v2) 12040 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12041 v3.AuxInt = 64 12042 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12043 v4.AddArg(y) 12044 v3.AddArg(v4) 12045 v.AddArg(v3) 12046 return true 12047 } 12048 } 12049 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 12050 b := v.Block 12051 _ = b 12052 types := &b.Func.Config.Types 12053 _ = types 12054 // match: (Lsh16x32 <t> x y) 12055 // 
cond: 12056 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12057 for { 12058 t := v.Type 12059 x := v.Args[0] 12060 y := v.Args[1] 12061 v.reset(OpARM64CSELULT) 12062 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12063 v0.AddArg(x) 12064 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12065 v1.AddArg(y) 12066 v0.AddArg(v1) 12067 v.AddArg(v0) 12068 v2 := b.NewValue0(v.Pos, OpConst64, t) 12069 v2.AuxInt = 0 12070 v.AddArg(v2) 12071 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12072 v3.AuxInt = 64 12073 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12074 v4.AddArg(y) 12075 v3.AddArg(v4) 12076 v.AddArg(v3) 12077 return true 12078 } 12079 } 12080 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 12081 b := v.Block 12082 _ = b 12083 // match: (Lsh16x64 x (MOVDconst [c])) 12084 // cond: uint64(c) < 16 12085 // result: (SLLconst x [c]) 12086 for { 12087 x := v.Args[0] 12088 v_1 := v.Args[1] 12089 if v_1.Op != OpARM64MOVDconst { 12090 break 12091 } 12092 c := v_1.AuxInt 12093 if !(uint64(c) < 16) { 12094 break 12095 } 12096 v.reset(OpARM64SLLconst) 12097 v.AuxInt = c 12098 v.AddArg(x) 12099 return true 12100 } 12101 // match: (Lsh16x64 _ (MOVDconst [c])) 12102 // cond: uint64(c) >= 16 12103 // result: (MOVDconst [0]) 12104 for { 12105 v_1 := v.Args[1] 12106 if v_1.Op != OpARM64MOVDconst { 12107 break 12108 } 12109 c := v_1.AuxInt 12110 if !(uint64(c) >= 16) { 12111 break 12112 } 12113 v.reset(OpARM64MOVDconst) 12114 v.AuxInt = 0 12115 return true 12116 } 12117 // match: (Lsh16x64 <t> x y) 12118 // cond: 12119 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 12120 for { 12121 t := v.Type 12122 x := v.Args[0] 12123 y := v.Args[1] 12124 v.reset(OpARM64CSELULT) 12125 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12126 v0.AddArg(x) 12127 v0.AddArg(y) 12128 v.AddArg(v0) 12129 v1 := b.NewValue0(v.Pos, OpConst64, t) 12130 v1.AuxInt = 0 12131 v.AddArg(v1) 12132 v2 := b.NewValue0(v.Pos, 
OpARM64CMPconst, TypeFlags) 12133 v2.AuxInt = 64 12134 v2.AddArg(y) 12135 v.AddArg(v2) 12136 return true 12137 } 12138 } 12139 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 12140 b := v.Block 12141 _ = b 12142 types := &b.Func.Config.Types 12143 _ = types 12144 // match: (Lsh16x8 <t> x y) 12145 // cond: 12146 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12147 for { 12148 t := v.Type 12149 x := v.Args[0] 12150 y := v.Args[1] 12151 v.reset(OpARM64CSELULT) 12152 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12153 v0.AddArg(x) 12154 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12155 v1.AddArg(y) 12156 v0.AddArg(v1) 12157 v.AddArg(v0) 12158 v2 := b.NewValue0(v.Pos, OpConst64, t) 12159 v2.AuxInt = 0 12160 v.AddArg(v2) 12161 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12162 v3.AuxInt = 64 12163 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12164 v4.AddArg(y) 12165 v3.AddArg(v4) 12166 v.AddArg(v3) 12167 return true 12168 } 12169 } 12170 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 12171 b := v.Block 12172 _ = b 12173 types := &b.Func.Config.Types 12174 _ = types 12175 // match: (Lsh32x16 <t> x y) 12176 // cond: 12177 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12178 for { 12179 t := v.Type 12180 x := v.Args[0] 12181 y := v.Args[1] 12182 v.reset(OpARM64CSELULT) 12183 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12184 v0.AddArg(x) 12185 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12186 v1.AddArg(y) 12187 v0.AddArg(v1) 12188 v.AddArg(v0) 12189 v2 := b.NewValue0(v.Pos, OpConst64, t) 12190 v2.AuxInt = 0 12191 v.AddArg(v2) 12192 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12193 v3.AuxInt = 64 12194 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12195 v4.AddArg(y) 12196 v3.AddArg(v4) 12197 v.AddArg(v3) 12198 return true 12199 } 12200 } 12201 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 12202 b := 
v.Block 12203 _ = b 12204 types := &b.Func.Config.Types 12205 _ = types 12206 // match: (Lsh32x32 <t> x y) 12207 // cond: 12208 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12209 for { 12210 t := v.Type 12211 x := v.Args[0] 12212 y := v.Args[1] 12213 v.reset(OpARM64CSELULT) 12214 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12215 v0.AddArg(x) 12216 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12217 v1.AddArg(y) 12218 v0.AddArg(v1) 12219 v.AddArg(v0) 12220 v2 := b.NewValue0(v.Pos, OpConst64, t) 12221 v2.AuxInt = 0 12222 v.AddArg(v2) 12223 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12224 v3.AuxInt = 64 12225 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12226 v4.AddArg(y) 12227 v3.AddArg(v4) 12228 v.AddArg(v3) 12229 return true 12230 } 12231 } 12232 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 12233 b := v.Block 12234 _ = b 12235 // match: (Lsh32x64 x (MOVDconst [c])) 12236 // cond: uint64(c) < 32 12237 // result: (SLLconst x [c]) 12238 for { 12239 x := v.Args[0] 12240 v_1 := v.Args[1] 12241 if v_1.Op != OpARM64MOVDconst { 12242 break 12243 } 12244 c := v_1.AuxInt 12245 if !(uint64(c) < 32) { 12246 break 12247 } 12248 v.reset(OpARM64SLLconst) 12249 v.AuxInt = c 12250 v.AddArg(x) 12251 return true 12252 } 12253 // match: (Lsh32x64 _ (MOVDconst [c])) 12254 // cond: uint64(c) >= 32 12255 // result: (MOVDconst [0]) 12256 for { 12257 v_1 := v.Args[1] 12258 if v_1.Op != OpARM64MOVDconst { 12259 break 12260 } 12261 c := v_1.AuxInt 12262 if !(uint64(c) >= 32) { 12263 break 12264 } 12265 v.reset(OpARM64MOVDconst) 12266 v.AuxInt = 0 12267 return true 12268 } 12269 // match: (Lsh32x64 <t> x y) 12270 // cond: 12271 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 12272 for { 12273 t := v.Type 12274 x := v.Args[0] 12275 y := v.Args[1] 12276 v.reset(OpARM64CSELULT) 12277 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12278 v0.AddArg(x) 12279 v0.AddArg(y) 12280 v.AddArg(v0) 
12281 v1 := b.NewValue0(v.Pos, OpConst64, t) 12282 v1.AuxInt = 0 12283 v.AddArg(v1) 12284 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12285 v2.AuxInt = 64 12286 v2.AddArg(y) 12287 v.AddArg(v2) 12288 return true 12289 } 12290 } 12291 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 12292 b := v.Block 12293 _ = b 12294 types := &b.Func.Config.Types 12295 _ = types 12296 // match: (Lsh32x8 <t> x y) 12297 // cond: 12298 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12299 for { 12300 t := v.Type 12301 x := v.Args[0] 12302 y := v.Args[1] 12303 v.reset(OpARM64CSELULT) 12304 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12305 v0.AddArg(x) 12306 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12307 v1.AddArg(y) 12308 v0.AddArg(v1) 12309 v.AddArg(v0) 12310 v2 := b.NewValue0(v.Pos, OpConst64, t) 12311 v2.AuxInt = 0 12312 v.AddArg(v2) 12313 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12314 v3.AuxInt = 64 12315 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12316 v4.AddArg(y) 12317 v3.AddArg(v4) 12318 v.AddArg(v3) 12319 return true 12320 } 12321 } 12322 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 12323 b := v.Block 12324 _ = b 12325 types := &b.Func.Config.Types 12326 _ = types 12327 // match: (Lsh64x16 <t> x y) 12328 // cond: 12329 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12330 for { 12331 t := v.Type 12332 x := v.Args[0] 12333 y := v.Args[1] 12334 v.reset(OpARM64CSELULT) 12335 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12336 v0.AddArg(x) 12337 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12338 v1.AddArg(y) 12339 v0.AddArg(v1) 12340 v.AddArg(v0) 12341 v2 := b.NewValue0(v.Pos, OpConst64, t) 12342 v2.AuxInt = 0 12343 v.AddArg(v2) 12344 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12345 v3.AuxInt = 64 12346 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12347 v4.AddArg(y) 12348 v3.AddArg(v4) 12349 
v.AddArg(v3) 12350 return true 12351 } 12352 } 12353 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 12354 b := v.Block 12355 _ = b 12356 types := &b.Func.Config.Types 12357 _ = types 12358 // match: (Lsh64x32 <t> x y) 12359 // cond: 12360 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12361 for { 12362 t := v.Type 12363 x := v.Args[0] 12364 y := v.Args[1] 12365 v.reset(OpARM64CSELULT) 12366 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12367 v0.AddArg(x) 12368 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12369 v1.AddArg(y) 12370 v0.AddArg(v1) 12371 v.AddArg(v0) 12372 v2 := b.NewValue0(v.Pos, OpConst64, t) 12373 v2.AuxInt = 0 12374 v.AddArg(v2) 12375 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12376 v3.AuxInt = 64 12377 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12378 v4.AddArg(y) 12379 v3.AddArg(v4) 12380 v.AddArg(v3) 12381 return true 12382 } 12383 } 12384 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 12385 b := v.Block 12386 _ = b 12387 // match: (Lsh64x64 x (MOVDconst [c])) 12388 // cond: uint64(c) < 64 12389 // result: (SLLconst x [c]) 12390 for { 12391 x := v.Args[0] 12392 v_1 := v.Args[1] 12393 if v_1.Op != OpARM64MOVDconst { 12394 break 12395 } 12396 c := v_1.AuxInt 12397 if !(uint64(c) < 64) { 12398 break 12399 } 12400 v.reset(OpARM64SLLconst) 12401 v.AuxInt = c 12402 v.AddArg(x) 12403 return true 12404 } 12405 // match: (Lsh64x64 _ (MOVDconst [c])) 12406 // cond: uint64(c) >= 64 12407 // result: (MOVDconst [0]) 12408 for { 12409 v_1 := v.Args[1] 12410 if v_1.Op != OpARM64MOVDconst { 12411 break 12412 } 12413 c := v_1.AuxInt 12414 if !(uint64(c) >= 64) { 12415 break 12416 } 12417 v.reset(OpARM64MOVDconst) 12418 v.AuxInt = 0 12419 return true 12420 } 12421 // match: (Lsh64x64 <t> x y) 12422 // cond: 12423 // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 12424 for { 12425 t := v.Type 12426 x := v.Args[0] 12427 y := v.Args[1] 12428 
v.reset(OpARM64CSELULT) 12429 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12430 v0.AddArg(x) 12431 v0.AddArg(y) 12432 v.AddArg(v0) 12433 v1 := b.NewValue0(v.Pos, OpConst64, t) 12434 v1.AuxInt = 0 12435 v.AddArg(v1) 12436 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12437 v2.AuxInt = 64 12438 v2.AddArg(y) 12439 v.AddArg(v2) 12440 return true 12441 } 12442 } 12443 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 12444 b := v.Block 12445 _ = b 12446 types := &b.Func.Config.Types 12447 _ = types 12448 // match: (Lsh64x8 <t> x y) 12449 // cond: 12450 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12451 for { 12452 t := v.Type 12453 x := v.Args[0] 12454 y := v.Args[1] 12455 v.reset(OpARM64CSELULT) 12456 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12457 v0.AddArg(x) 12458 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12459 v1.AddArg(y) 12460 v0.AddArg(v1) 12461 v.AddArg(v0) 12462 v2 := b.NewValue0(v.Pos, OpConst64, t) 12463 v2.AuxInt = 0 12464 v.AddArg(v2) 12465 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12466 v3.AuxInt = 64 12467 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12468 v4.AddArg(y) 12469 v3.AddArg(v4) 12470 v.AddArg(v3) 12471 return true 12472 } 12473 } 12474 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 12475 b := v.Block 12476 _ = b 12477 types := &b.Func.Config.Types 12478 _ = types 12479 // match: (Lsh8x16 <t> x y) 12480 // cond: 12481 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12482 for { 12483 t := v.Type 12484 x := v.Args[0] 12485 y := v.Args[1] 12486 v.reset(OpARM64CSELULT) 12487 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12488 v0.AddArg(x) 12489 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12490 v1.AddArg(y) 12491 v0.AddArg(v1) 12492 v.AddArg(v0) 12493 v2 := b.NewValue0(v.Pos, OpConst64, t) 12494 v2.AuxInt = 0 12495 v.AddArg(v2) 12496 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12497 
v3.AuxInt = 64 12498 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 12499 v4.AddArg(y) 12500 v3.AddArg(v4) 12501 v.AddArg(v3) 12502 return true 12503 } 12504 } 12505 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 12506 b := v.Block 12507 _ = b 12508 types := &b.Func.Config.Types 12509 _ = types 12510 // match: (Lsh8x32 <t> x y) 12511 // cond: 12512 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12513 for { 12514 t := v.Type 12515 x := v.Args[0] 12516 y := v.Args[1] 12517 v.reset(OpARM64CSELULT) 12518 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12519 v0.AddArg(x) 12520 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12521 v1.AddArg(y) 12522 v0.AddArg(v1) 12523 v.AddArg(v0) 12524 v2 := b.NewValue0(v.Pos, OpConst64, t) 12525 v2.AuxInt = 0 12526 v.AddArg(v2) 12527 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12528 v3.AuxInt = 64 12529 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 12530 v4.AddArg(y) 12531 v3.AddArg(v4) 12532 v.AddArg(v3) 12533 return true 12534 } 12535 } 12536 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool { 12537 b := v.Block 12538 _ = b 12539 // match: (Lsh8x64 x (MOVDconst [c])) 12540 // cond: uint64(c) < 8 12541 // result: (SLLconst x [c]) 12542 for { 12543 x := v.Args[0] 12544 v_1 := v.Args[1] 12545 if v_1.Op != OpARM64MOVDconst { 12546 break 12547 } 12548 c := v_1.AuxInt 12549 if !(uint64(c) < 8) { 12550 break 12551 } 12552 v.reset(OpARM64SLLconst) 12553 v.AuxInt = c 12554 v.AddArg(x) 12555 return true 12556 } 12557 // match: (Lsh8x64 _ (MOVDconst [c])) 12558 // cond: uint64(c) >= 8 12559 // result: (MOVDconst [0]) 12560 for { 12561 v_1 := v.Args[1] 12562 if v_1.Op != OpARM64MOVDconst { 12563 break 12564 } 12565 c := v_1.AuxInt 12566 if !(uint64(c) >= 8) { 12567 break 12568 } 12569 v.reset(OpARM64MOVDconst) 12570 v.AuxInt = 0 12571 return true 12572 } 12573 // match: (Lsh8x64 <t> x y) 12574 // cond: 12575 // result: (CSELULT (SLL <t> x y) (Const64 <t> 
[0]) (CMPconst [64] y)) 12576 for { 12577 t := v.Type 12578 x := v.Args[0] 12579 y := v.Args[1] 12580 v.reset(OpARM64CSELULT) 12581 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12582 v0.AddArg(x) 12583 v0.AddArg(y) 12584 v.AddArg(v0) 12585 v1 := b.NewValue0(v.Pos, OpConst64, t) 12586 v1.AuxInt = 0 12587 v.AddArg(v1) 12588 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12589 v2.AuxInt = 64 12590 v2.AddArg(y) 12591 v.AddArg(v2) 12592 return true 12593 } 12594 } 12595 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool { 12596 b := v.Block 12597 _ = b 12598 types := &b.Func.Config.Types 12599 _ = types 12600 // match: (Lsh8x8 <t> x y) 12601 // cond: 12602 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12603 for { 12604 t := v.Type 12605 x := v.Args[0] 12606 y := v.Args[1] 12607 v.reset(OpARM64CSELULT) 12608 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12609 v0.AddArg(x) 12610 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12611 v1.AddArg(y) 12612 v0.AddArg(v1) 12613 v.AddArg(v0) 12614 v2 := b.NewValue0(v.Pos, OpConst64, t) 12615 v2.AuxInt = 0 12616 v.AddArg(v2) 12617 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 12618 v3.AuxInt = 64 12619 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 12620 v4.AddArg(y) 12621 v3.AddArg(v4) 12622 v.AddArg(v3) 12623 return true 12624 } 12625 } 12626 func rewriteValueARM64_OpMod16_0(v *Value) bool { 12627 b := v.Block 12628 _ = b 12629 types := &b.Func.Config.Types 12630 _ = types 12631 // match: (Mod16 x y) 12632 // cond: 12633 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 12634 for { 12635 x := v.Args[0] 12636 y := v.Args[1] 12637 v.reset(OpARM64MODW) 12638 v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 12639 v0.AddArg(x) 12640 v.AddArg(v0) 12641 v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32) 12642 v1.AddArg(y) 12643 v.AddArg(v1) 12644 return true 12645 } 12646 } 12647 func rewriteValueARM64_OpMod16u_0(v *Value) bool { 12648 b := v.Block 12649 
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod16u x y)
	// cond:
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpMod32_0 lowers the generic Mod32 op to the ARM64
// MODW (signed 32-bit remainder) instruction. The rule is unconditional,
// so the single-iteration match loop always returns true.
func rewriteValueARM64_OpMod32_0(v *Value) bool {
	// match: (Mod32 x y)
	// cond:
	// result: (MODW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMod32u_0 lowers Mod32u to UMODW (unsigned 32-bit
// remainder). Unconditional rewrite.
func rewriteValueARM64_OpMod32u_0(v *Value) bool {
	// match: (Mod32u x y)
	// cond:
	// result: (UMODW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMod64_0 lowers Mod64 to MOD (signed 64-bit
// remainder). Unconditional rewrite.
func rewriteValueARM64_OpMod64_0(v *Value) bool {
	// match: (Mod64 x y)
	// cond:
	// result: (MOD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMod64u_0 lowers Mod64u to UMOD (unsigned 64-bit
// remainder). Unconditional rewrite.
func rewriteValueARM64_OpMod64u_0(v *Value) bool {
	// match: (Mod64u x y)
	// cond:
	// result: (UMOD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMod8_0 lowers Mod8 by sign-extending both 8-bit
// operands to 32 bits and using MODW; sign extension preserves the
// signed remainder of the narrow values.
func rewriteValueARM64_OpMod8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod8 x y)
	// cond:
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
		v0.AddArg(x)
v.AddArg(v0) 12735 v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32) 12736 v1.AddArg(y) 12737 v.AddArg(v1) 12738 return true 12739 } 12740 } 12741 func rewriteValueARM64_OpMod8u_0(v *Value) bool { 12742 b := v.Block 12743 _ = b 12744 types := &b.Func.Config.Types 12745 _ = types 12746 // match: (Mod8u x y) 12747 // cond: 12748 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 12749 for { 12750 x := v.Args[0] 12751 y := v.Args[1] 12752 v.reset(OpARM64UMODW) 12753 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 12754 v0.AddArg(x) 12755 v.AddArg(v0) 12756 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32) 12757 v1.AddArg(y) 12758 v.AddArg(v1) 12759 return true 12760 } 12761 } 12762 func rewriteValueARM64_OpMove_0(v *Value) bool { 12763 b := v.Block 12764 _ = b 12765 types := &b.Func.Config.Types 12766 _ = types 12767 // match: (Move [0] _ _ mem) 12768 // cond: 12769 // result: mem 12770 for { 12771 if v.AuxInt != 0 { 12772 break 12773 } 12774 mem := v.Args[2] 12775 v.reset(OpCopy) 12776 v.Type = mem.Type 12777 v.AddArg(mem) 12778 return true 12779 } 12780 // match: (Move [1] dst src mem) 12781 // cond: 12782 // result: (MOVBstore dst (MOVBUload src mem) mem) 12783 for { 12784 if v.AuxInt != 1 { 12785 break 12786 } 12787 dst := v.Args[0] 12788 src := v.Args[1] 12789 mem := v.Args[2] 12790 v.reset(OpARM64MOVBstore) 12791 v.AddArg(dst) 12792 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, types.UInt8) 12793 v0.AddArg(src) 12794 v0.AddArg(mem) 12795 v.AddArg(v0) 12796 v.AddArg(mem) 12797 return true 12798 } 12799 // match: (Move [2] dst src mem) 12800 // cond: 12801 // result: (MOVHstore dst (MOVHUload src mem) mem) 12802 for { 12803 if v.AuxInt != 2 { 12804 break 12805 } 12806 dst := v.Args[0] 12807 src := v.Args[1] 12808 mem := v.Args[2] 12809 v.reset(OpARM64MOVHstore) 12810 v.AddArg(dst) 12811 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, types.UInt16) 12812 v0.AddArg(src) 12813 v0.AddArg(mem) 12814 v.AddArg(v0) 12815 v.AddArg(mem) 12816 return true 12817 } 
12818 // match: (Move [4] dst src mem) 12819 // cond: 12820 // result: (MOVWstore dst (MOVWUload src mem) mem) 12821 for { 12822 if v.AuxInt != 4 { 12823 break 12824 } 12825 dst := v.Args[0] 12826 src := v.Args[1] 12827 mem := v.Args[2] 12828 v.reset(OpARM64MOVWstore) 12829 v.AddArg(dst) 12830 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, types.UInt32) 12831 v0.AddArg(src) 12832 v0.AddArg(mem) 12833 v.AddArg(v0) 12834 v.AddArg(mem) 12835 return true 12836 } 12837 // match: (Move [8] dst src mem) 12838 // cond: 12839 // result: (MOVDstore dst (MOVDload src mem) mem) 12840 for { 12841 if v.AuxInt != 8 { 12842 break 12843 } 12844 dst := v.Args[0] 12845 src := v.Args[1] 12846 mem := v.Args[2] 12847 v.reset(OpARM64MOVDstore) 12848 v.AddArg(dst) 12849 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 12850 v0.AddArg(src) 12851 v0.AddArg(mem) 12852 v.AddArg(v0) 12853 v.AddArg(mem) 12854 return true 12855 } 12856 // match: (Move [3] dst src mem) 12857 // cond: 12858 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 12859 for { 12860 if v.AuxInt != 3 { 12861 break 12862 } 12863 dst := v.Args[0] 12864 src := v.Args[1] 12865 mem := v.Args[2] 12866 v.reset(OpARM64MOVBstore) 12867 v.AuxInt = 2 12868 v.AddArg(dst) 12869 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, types.UInt8) 12870 v0.AuxInt = 2 12871 v0.AddArg(src) 12872 v0.AddArg(mem) 12873 v.AddArg(v0) 12874 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 12875 v1.AddArg(dst) 12876 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, types.UInt16) 12877 v2.AddArg(src) 12878 v2.AddArg(mem) 12879 v1.AddArg(v2) 12880 v1.AddArg(mem) 12881 v.AddArg(v1) 12882 return true 12883 } 12884 // match: (Move [5] dst src mem) 12885 // cond: 12886 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 12887 for { 12888 if v.AuxInt != 5 { 12889 break 12890 } 12891 dst := v.Args[0] 12892 src := v.Args[1] 12893 mem := v.Args[2] 12894 v.reset(OpARM64MOVBstore) 12895 
v.AuxInt = 4 12896 v.AddArg(dst) 12897 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, types.UInt8) 12898 v0.AuxInt = 4 12899 v0.AddArg(src) 12900 v0.AddArg(mem) 12901 v.AddArg(v0) 12902 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12903 v1.AddArg(dst) 12904 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, types.UInt32) 12905 v2.AddArg(src) 12906 v2.AddArg(mem) 12907 v1.AddArg(v2) 12908 v1.AddArg(mem) 12909 v.AddArg(v1) 12910 return true 12911 } 12912 // match: (Move [6] dst src mem) 12913 // cond: 12914 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 12915 for { 12916 if v.AuxInt != 6 { 12917 break 12918 } 12919 dst := v.Args[0] 12920 src := v.Args[1] 12921 mem := v.Args[2] 12922 v.reset(OpARM64MOVHstore) 12923 v.AuxInt = 4 12924 v.AddArg(dst) 12925 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, types.UInt16) 12926 v0.AuxInt = 4 12927 v0.AddArg(src) 12928 v0.AddArg(mem) 12929 v.AddArg(v0) 12930 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12931 v1.AddArg(dst) 12932 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, types.UInt32) 12933 v2.AddArg(src) 12934 v2.AddArg(mem) 12935 v1.AddArg(v2) 12936 v1.AddArg(mem) 12937 v.AddArg(v1) 12938 return true 12939 } 12940 // match: (Move [7] dst src mem) 12941 // cond: 12942 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 12943 for { 12944 if v.AuxInt != 7 { 12945 break 12946 } 12947 dst := v.Args[0] 12948 src := v.Args[1] 12949 mem := v.Args[2] 12950 v.reset(OpARM64MOVBstore) 12951 v.AuxInt = 6 12952 v.AddArg(dst) 12953 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, types.UInt8) 12954 v0.AuxInt = 6 12955 v0.AddArg(src) 12956 v0.AddArg(mem) 12957 v.AddArg(v0) 12958 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem) 12959 v1.AuxInt = 4 12960 v1.AddArg(dst) 12961 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, types.UInt16) 12962 v2.AuxInt = 4 12963 v2.AddArg(src) 12964 v2.AddArg(mem) 12965 v1.AddArg(v2) 
12966 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem) 12967 v3.AddArg(dst) 12968 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, types.UInt32) 12969 v4.AddArg(src) 12970 v4.AddArg(mem) 12971 v3.AddArg(v4) 12972 v3.AddArg(mem) 12973 v1.AddArg(v3) 12974 v.AddArg(v1) 12975 return true 12976 } 12977 // match: (Move [12] dst src mem) 12978 // cond: 12979 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 12980 for { 12981 if v.AuxInt != 12 { 12982 break 12983 } 12984 dst := v.Args[0] 12985 src := v.Args[1] 12986 mem := v.Args[2] 12987 v.reset(OpARM64MOVWstore) 12988 v.AuxInt = 8 12989 v.AddArg(dst) 12990 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, types.UInt32) 12991 v0.AuxInt = 8 12992 v0.AddArg(src) 12993 v0.AddArg(mem) 12994 v.AddArg(v0) 12995 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 12996 v1.AddArg(dst) 12997 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 12998 v2.AddArg(src) 12999 v2.AddArg(mem) 13000 v1.AddArg(v2) 13001 v1.AddArg(mem) 13002 v.AddArg(v1) 13003 return true 13004 } 13005 return false 13006 } 13007 func rewriteValueARM64_OpMove_10(v *Value) bool { 13008 b := v.Block 13009 _ = b 13010 config := b.Func.Config 13011 _ = config 13012 types := &b.Func.Config.Types 13013 _ = types 13014 // match: (Move [16] dst src mem) 13015 // cond: 13016 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 13017 for { 13018 if v.AuxInt != 16 { 13019 break 13020 } 13021 dst := v.Args[0] 13022 src := v.Args[1] 13023 mem := v.Args[2] 13024 v.reset(OpARM64MOVDstore) 13025 v.AuxInt = 8 13026 v.AddArg(dst) 13027 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 13028 v0.AuxInt = 8 13029 v0.AddArg(src) 13030 v0.AddArg(mem) 13031 v.AddArg(v0) 13032 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 13033 v1.AddArg(dst) 13034 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 13035 v2.AddArg(src) 13036 v2.AddArg(mem) 13037 v1.AddArg(v2) 13038 v1.AddArg(mem) 13039 
v.AddArg(v1) 13040 return true 13041 } 13042 // match: (Move [24] dst src mem) 13043 // cond: 13044 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) 13045 for { 13046 if v.AuxInt != 24 { 13047 break 13048 } 13049 dst := v.Args[0] 13050 src := v.Args[1] 13051 mem := v.Args[2] 13052 v.reset(OpARM64MOVDstore) 13053 v.AuxInt = 16 13054 v.AddArg(dst) 13055 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 13056 v0.AuxInt = 16 13057 v0.AddArg(src) 13058 v0.AddArg(mem) 13059 v.AddArg(v0) 13060 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 13061 v1.AuxInt = 8 13062 v1.AddArg(dst) 13063 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 13064 v2.AuxInt = 8 13065 v2.AddArg(src) 13066 v2.AddArg(mem) 13067 v1.AddArg(v2) 13068 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem) 13069 v3.AddArg(dst) 13070 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, types.UInt64) 13071 v4.AddArg(src) 13072 v4.AddArg(mem) 13073 v3.AddArg(v4) 13074 v3.AddArg(mem) 13075 v1.AddArg(v3) 13076 v.AddArg(v1) 13077 return true 13078 } 13079 // match: (Move [s] dst src mem) 13080 // cond: s%8 != 0 && s > 8 13081 // result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem)) 13082 for { 13083 s := v.AuxInt 13084 dst := v.Args[0] 13085 src := v.Args[1] 13086 mem := v.Args[2] 13087 if !(s%8 != 0 && s > 8) { 13088 break 13089 } 13090 v.reset(OpMove) 13091 v.AuxInt = s % 8 13092 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 13093 v0.AuxInt = s - s%8 13094 v0.AddArg(dst) 13095 v.AddArg(v0) 13096 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 13097 v1.AuxInt = s - s%8 13098 v1.AddArg(src) 13099 v.AddArg(v1) 13100 v2 := b.NewValue0(v.Pos, OpMove, TypeMem) 13101 v2.AuxInt = s - s%8 13102 v2.AddArg(dst) 13103 v2.AddArg(src) 13104 v2.AddArg(mem) 13105 v.AddArg(v2) 13106 return true 13107 } 13108 // match: (Move [s] dst src mem) 13109 // cond: s%8 == 0 && s > 24 && s 
<= 8*128 && !config.noDuffDevice 13110 // result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem) 13111 for { 13112 s := v.AuxInt 13113 dst := v.Args[0] 13114 src := v.Args[1] 13115 mem := v.Args[2] 13116 if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) { 13117 break 13118 } 13119 v.reset(OpARM64DUFFCOPY) 13120 v.AuxInt = 8 * (128 - int64(s/8)) 13121 v.AddArg(dst) 13122 v.AddArg(src) 13123 v.AddArg(mem) 13124 return true 13125 } 13126 // match: (Move [s] dst src mem) 13127 // cond: s > 24 && s%8 == 0 13128 // result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem) 13129 for { 13130 s := v.AuxInt 13131 dst := v.Args[0] 13132 src := v.Args[1] 13133 mem := v.Args[2] 13134 if !(s > 24 && s%8 == 0) { 13135 break 13136 } 13137 v.reset(OpARM64LoweredMove) 13138 v.AddArg(dst) 13139 v.AddArg(src) 13140 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 13141 v0.AuxInt = s - 8 13142 v0.AddArg(src) 13143 v.AddArg(v0) 13144 v.AddArg(mem) 13145 return true 13146 } 13147 return false 13148 } 13149 func rewriteValueARM64_OpMul16_0(v *Value) bool { 13150 // match: (Mul16 x y) 13151 // cond: 13152 // result: (MULW x y) 13153 for { 13154 x := v.Args[0] 13155 y := v.Args[1] 13156 v.reset(OpARM64MULW) 13157 v.AddArg(x) 13158 v.AddArg(y) 13159 return true 13160 } 13161 } 13162 func rewriteValueARM64_OpMul32_0(v *Value) bool { 13163 // match: (Mul32 x y) 13164 // cond: 13165 // result: (MULW x y) 13166 for { 13167 x := v.Args[0] 13168 y := v.Args[1] 13169 v.reset(OpARM64MULW) 13170 v.AddArg(x) 13171 v.AddArg(y) 13172 return true 13173 } 13174 } 13175 func rewriteValueARM64_OpMul32F_0(v *Value) bool { 13176 // match: (Mul32F x y) 13177 // cond: 13178 // result: (FMULS x y) 13179 for { 13180 x := v.Args[0] 13181 y := v.Args[1] 13182 v.reset(OpARM64FMULS) 13183 v.AddArg(x) 13184 v.AddArg(y) 13185 return true 13186 } 13187 } 13188 func rewriteValueARM64_OpMul64_0(v *Value) bool { 13189 // match: (Mul64 x y) 13190 // cond: 13191 // result: (MUL x y) 13192 for { 13193 
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMul64F_0 lowers the 64-bit float multiply to the
// ARM64 FMULD instruction. Unconditional rewrite.
func rewriteValueARM64_OpMul64F_0(v *Value) bool {
	// match: (Mul64F x y)
	// cond:
	// result: (FMULD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpMul8_0 lowers Mul8 to MULW; the low 8 bits of a
// 32-bit multiply equal the 8-bit product, so no extension is needed.
func rewriteValueARM64_OpMul8_0(v *Value) bool {
	// match: (Mul8 x y)
	// cond:
	// result: (MULW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpNeg16_0 lowers Neg16 to NEG; only the low 16 bits
// of the result are observed by consumers of the narrow value.
func rewriteValueARM64_OpNeg16_0(v *Value) bool {
	// match: (Neg16 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeg32_0 lowers Neg32 to NEG. Unconditional rewrite.
func rewriteValueARM64_OpNeg32_0(v *Value) bool {
	// match: (Neg32 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeg32F_0 lowers the 32-bit float negate to FNEGS.
func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
	// match: (Neg32F x)
	// cond:
	// result: (FNEGS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGS)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeg64_0 lowers Neg64 to NEG. Unconditional rewrite.
func rewriteValueARM64_OpNeg64_0(v *Value) bool {
	// match: (Neg64 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeg64F_0 lowers the 64-bit float negate to FNEGD.
func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
	// match: (Neg64F x)
	// cond:
	// result: (FNEGD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeg8_0 lowers Neg8 to NEG; consumers of the
// narrow value only observe the low 8 bits.
func rewriteValueARM64_OpNeg8_0(v *Value) bool {
	// match: (Neg8 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpNeq16_0 lowers Neq16 to NotEqual over a 32-bit
// compare. Both operands are zero-extended identically, which preserves
// (in)equality of the original 16-bit values.
func rewriteValueARM64_OpNeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Neq16 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeq32_0 lowers Neq32 to NotEqual over a CMPW
// (32-bit compare) of the operands.
func rewriteValueARM64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (NotEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeq32F_0 lowers the 32-bit float inequality to
// NotEqual over an FCMPS flags result.
func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (NotEqual (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeq64_0 lowers Neq64 to NotEqual over a CMP
// (64-bit compare) of the operands.
func rewriteValueARM64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeq64F_0 lowers the 64-bit float inequality to
// NotEqual over an FCMPD flags result.
func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (NotEqual (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeq8_0 lowers Neq8 to NotEqual over a 32-bit
// compare of zero-extended operands; identical extension on both sides
// preserves (in)equality.
func rewriteValueARM64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Neq8 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNeqB_0 lowers boolean inequality to XOR: for 0/1
// boolean values, x XOR y is 1 exactly when they differ.
func rewriteValueARM64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpNeqPtr_0 lowers pointer inequality to NotEqual
// over a 64-bit CMP of the pointers.
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpNilCheck_0 lowers the generic NilCheck to the
// machine-specific LoweredNilCheck, keeping the pointer and memory args.
func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpNot_0 lowers boolean Not as XOR with the constant
// 1: for 0/1 boolean values this flips the low bit.
func rewriteValueARM64_OpNot_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Not x)
	// cond:
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v.Args[0]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpOffPtr_0 lowers OffPtr. Offsets from the stack
// pointer become a MOVDaddr address materialization; all other pointers
// get a plain ADDconst. The first rule breaks out (and falls through to
// the second) when the base is not SP.
func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVDaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}

// rewriteValueARM64_OpOr16_0 lowers Or16 to the full-width OR; bitwise
// OR needs no extension for narrow values.
func rewriteValueARM64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpOr32_0 lowers Or32 to OR. Unconditional rewrite.
func rewriteValueARM64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpOr64_0 lowers Or64 to OR. Unconditional rewrite.
func rewriteValueARM64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpOr8_0 lowers Or8 to OR; bitwise OR needs no
// extension for narrow values.
func rewriteValueARM64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
13540 y := v.Args[1] 13541 v.reset(OpARM64OR) 13542 v.AddArg(x) 13543 v.AddArg(y) 13544 return true 13545 } 13546 } 13547 func rewriteValueARM64_OpOrB_0(v *Value) bool { 13548 // match: (OrB x y) 13549 // cond: 13550 // result: (OR x y) 13551 for { 13552 x := v.Args[0] 13553 y := v.Args[1] 13554 v.reset(OpARM64OR) 13555 v.AddArg(x) 13556 v.AddArg(y) 13557 return true 13558 } 13559 } 13560 func rewriteValueARM64_OpRound32F_0(v *Value) bool { 13561 // match: (Round32F x) 13562 // cond: 13563 // result: x 13564 for { 13565 x := v.Args[0] 13566 v.reset(OpCopy) 13567 v.Type = x.Type 13568 v.AddArg(x) 13569 return true 13570 } 13571 } 13572 func rewriteValueARM64_OpRound64F_0(v *Value) bool { 13573 // match: (Round64F x) 13574 // cond: 13575 // result: x 13576 for { 13577 x := v.Args[0] 13578 v.reset(OpCopy) 13579 v.Type = x.Type 13580 v.AddArg(x) 13581 return true 13582 } 13583 } 13584 func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool { 13585 b := v.Block 13586 _ = b 13587 types := &b.Func.Config.Types 13588 _ = types 13589 // match: (Rsh16Ux16 <t> x y) 13590 // cond: 13591 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13592 for { 13593 t := v.Type 13594 x := v.Args[0] 13595 y := v.Args[1] 13596 v.reset(OpARM64CSELULT) 13597 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13598 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13599 v1.AddArg(x) 13600 v0.AddArg(v1) 13601 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13602 v2.AddArg(y) 13603 v0.AddArg(v2) 13604 v.AddArg(v0) 13605 v3 := b.NewValue0(v.Pos, OpConst64, t) 13606 v3.AuxInt = 0 13607 v.AddArg(v3) 13608 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13609 v4.AuxInt = 64 13610 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13611 v5.AddArg(y) 13612 v4.AddArg(v5) 13613 v.AddArg(v4) 13614 return true 13615 } 13616 } 13617 func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool { 13618 b := v.Block 13619 _ = b 13620 types := 
&b.Func.Config.Types 13621 _ = types 13622 // match: (Rsh16Ux32 <t> x y) 13623 // cond: 13624 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13625 for { 13626 t := v.Type 13627 x := v.Args[0] 13628 y := v.Args[1] 13629 v.reset(OpARM64CSELULT) 13630 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13631 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13632 v1.AddArg(x) 13633 v0.AddArg(v1) 13634 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13635 v2.AddArg(y) 13636 v0.AddArg(v2) 13637 v.AddArg(v0) 13638 v3 := b.NewValue0(v.Pos, OpConst64, t) 13639 v3.AuxInt = 0 13640 v.AddArg(v3) 13641 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13642 v4.AuxInt = 64 13643 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13644 v5.AddArg(y) 13645 v4.AddArg(v5) 13646 v.AddArg(v4) 13647 return true 13648 } 13649 } 13650 func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool { 13651 b := v.Block 13652 _ = b 13653 types := &b.Func.Config.Types 13654 _ = types 13655 // match: (Rsh16Ux64 x (MOVDconst [c])) 13656 // cond: uint64(c) < 16 13657 // result: (SRLconst (ZeroExt16to64 x) [c]) 13658 for { 13659 x := v.Args[0] 13660 v_1 := v.Args[1] 13661 if v_1.Op != OpARM64MOVDconst { 13662 break 13663 } 13664 c := v_1.AuxInt 13665 if !(uint64(c) < 16) { 13666 break 13667 } 13668 v.reset(OpARM64SRLconst) 13669 v.AuxInt = c 13670 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13671 v0.AddArg(x) 13672 v.AddArg(v0) 13673 return true 13674 } 13675 // match: (Rsh16Ux64 _ (MOVDconst [c])) 13676 // cond: uint64(c) >= 16 13677 // result: (MOVDconst [0]) 13678 for { 13679 v_1 := v.Args[1] 13680 if v_1.Op != OpARM64MOVDconst { 13681 break 13682 } 13683 c := v_1.AuxInt 13684 if !(uint64(c) >= 16) { 13685 break 13686 } 13687 v.reset(OpARM64MOVDconst) 13688 v.AuxInt = 0 13689 return true 13690 } 13691 // match: (Rsh16Ux64 <t> x y) 13692 // cond: 13693 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 
<t> [0]) (CMPconst [64] y)) 13694 for { 13695 t := v.Type 13696 x := v.Args[0] 13697 y := v.Args[1] 13698 v.reset(OpARM64CSELULT) 13699 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13700 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13701 v1.AddArg(x) 13702 v0.AddArg(v1) 13703 v0.AddArg(y) 13704 v.AddArg(v0) 13705 v2 := b.NewValue0(v.Pos, OpConst64, t) 13706 v2.AuxInt = 0 13707 v.AddArg(v2) 13708 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13709 v3.AuxInt = 64 13710 v3.AddArg(y) 13711 v.AddArg(v3) 13712 return true 13713 } 13714 } 13715 func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool { 13716 b := v.Block 13717 _ = b 13718 types := &b.Func.Config.Types 13719 _ = types 13720 // match: (Rsh16Ux8 <t> x y) 13721 // cond: 13722 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13723 for { 13724 t := v.Type 13725 x := v.Args[0] 13726 y := v.Args[1] 13727 v.reset(OpARM64CSELULT) 13728 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13729 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13730 v1.AddArg(x) 13731 v0.AddArg(v1) 13732 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 13733 v2.AddArg(y) 13734 v0.AddArg(v2) 13735 v.AddArg(v0) 13736 v3 := b.NewValue0(v.Pos, OpConst64, t) 13737 v3.AuxInt = 0 13738 v.AddArg(v3) 13739 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13740 v4.AuxInt = 64 13741 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 13742 v5.AddArg(y) 13743 v4.AddArg(v5) 13744 v.AddArg(v4) 13745 return true 13746 } 13747 } 13748 func rewriteValueARM64_OpRsh16x16_0(v *Value) bool { 13749 b := v.Block 13750 _ = b 13751 types := &b.Func.Config.Types 13752 _ = types 13753 // match: (Rsh16x16 x y) 13754 // cond: 13755 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 13756 for { 13757 x := v.Args[0] 13758 y := v.Args[1] 13759 v.reset(OpARM64SRA) 13760 v0 := b.NewValue0(v.Pos, 
OpSignExt16to64, types.Int64) 13761 v0.AddArg(x) 13762 v.AddArg(v0) 13763 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13764 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13765 v2.AddArg(y) 13766 v1.AddArg(v2) 13767 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13768 v3.AuxInt = 63 13769 v1.AddArg(v3) 13770 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13771 v4.AuxInt = 64 13772 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13773 v5.AddArg(y) 13774 v4.AddArg(v5) 13775 v1.AddArg(v4) 13776 v.AddArg(v1) 13777 return true 13778 } 13779 } 13780 func rewriteValueARM64_OpRsh16x32_0(v *Value) bool { 13781 b := v.Block 13782 _ = b 13783 types := &b.Func.Config.Types 13784 _ = types 13785 // match: (Rsh16x32 x y) 13786 // cond: 13787 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 13788 for { 13789 x := v.Args[0] 13790 y := v.Args[1] 13791 v.reset(OpARM64SRA) 13792 v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64) 13793 v0.AddArg(x) 13794 v.AddArg(v0) 13795 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13796 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13797 v2.AddArg(y) 13798 v1.AddArg(v2) 13799 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13800 v3.AuxInt = 63 13801 v1.AddArg(v3) 13802 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13803 v4.AuxInt = 64 13804 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13805 v5.AddArg(y) 13806 v4.AddArg(v5) 13807 v1.AddArg(v4) 13808 v.AddArg(v1) 13809 return true 13810 } 13811 } 13812 func rewriteValueARM64_OpRsh16x64_0(v *Value) bool { 13813 b := v.Block 13814 _ = b 13815 types := &b.Func.Config.Types 13816 _ = types 13817 // match: (Rsh16x64 x (MOVDconst [c])) 13818 // cond: uint64(c) < 16 13819 // result: (SRAconst (SignExt16to64 x) [c]) 13820 for { 13821 x := v.Args[0] 13822 v_1 := v.Args[1] 13823 if v_1.Op != OpARM64MOVDconst { 13824 break 13825 } 13826 c := v_1.AuxInt 13827 if !(uint64(c) < 
16) { 13828 break 13829 } 13830 v.reset(OpARM64SRAconst) 13831 v.AuxInt = c 13832 v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64) 13833 v0.AddArg(x) 13834 v.AddArg(v0) 13835 return true 13836 } 13837 // match: (Rsh16x64 x (MOVDconst [c])) 13838 // cond: uint64(c) >= 16 13839 // result: (SRAconst (SignExt16to64 x) [63]) 13840 for { 13841 x := v.Args[0] 13842 v_1 := v.Args[1] 13843 if v_1.Op != OpARM64MOVDconst { 13844 break 13845 } 13846 c := v_1.AuxInt 13847 if !(uint64(c) >= 16) { 13848 break 13849 } 13850 v.reset(OpARM64SRAconst) 13851 v.AuxInt = 63 13852 v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64) 13853 v0.AddArg(x) 13854 v.AddArg(v0) 13855 return true 13856 } 13857 // match: (Rsh16x64 x y) 13858 // cond: 13859 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 13860 for { 13861 x := v.Args[0] 13862 y := v.Args[1] 13863 v.reset(OpARM64SRA) 13864 v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64) 13865 v0.AddArg(x) 13866 v.AddArg(v0) 13867 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13868 v1.AddArg(y) 13869 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 13870 v2.AuxInt = 63 13871 v1.AddArg(v2) 13872 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13873 v3.AuxInt = 64 13874 v3.AddArg(y) 13875 v1.AddArg(v3) 13876 v.AddArg(v1) 13877 return true 13878 } 13879 } 13880 func rewriteValueARM64_OpRsh16x8_0(v *Value) bool { 13881 b := v.Block 13882 _ = b 13883 types := &b.Func.Config.Types 13884 _ = types 13885 // match: (Rsh16x8 x y) 13886 // cond: 13887 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 13888 for { 13889 x := v.Args[0] 13890 y := v.Args[1] 13891 v.reset(OpARM64SRA) 13892 v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64) 13893 v0.AddArg(x) 13894 v.AddArg(v0) 13895 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 13896 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 13897 v2.AddArg(y) 13898 
v1.AddArg(v2) 13899 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 13900 v3.AuxInt = 63 13901 v1.AddArg(v3) 13902 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13903 v4.AuxInt = 64 13904 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 13905 v5.AddArg(y) 13906 v4.AddArg(v5) 13907 v1.AddArg(v4) 13908 v.AddArg(v1) 13909 return true 13910 } 13911 } 13912 func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool { 13913 b := v.Block 13914 _ = b 13915 types := &b.Func.Config.Types 13916 _ = types 13917 // match: (Rsh32Ux16 <t> x y) 13918 // cond: 13919 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13920 for { 13921 t := v.Type 13922 x := v.Args[0] 13923 y := v.Args[1] 13924 v.reset(OpARM64CSELULT) 13925 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13926 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13927 v1.AddArg(x) 13928 v0.AddArg(v1) 13929 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13930 v2.AddArg(y) 13931 v0.AddArg(v2) 13932 v.AddArg(v0) 13933 v3 := b.NewValue0(v.Pos, OpConst64, t) 13934 v3.AuxInt = 0 13935 v.AddArg(v3) 13936 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13937 v4.AuxInt = 64 13938 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 13939 v5.AddArg(y) 13940 v4.AddArg(v5) 13941 v.AddArg(v4) 13942 return true 13943 } 13944 } 13945 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool { 13946 b := v.Block 13947 _ = b 13948 types := &b.Func.Config.Types 13949 _ = types 13950 // match: (Rsh32Ux32 <t> x y) 13951 // cond: 13952 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13953 for { 13954 t := v.Type 13955 x := v.Args[0] 13956 y := v.Args[1] 13957 v.reset(OpARM64CSELULT) 13958 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 13959 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13960 v1.AddArg(x) 13961 v0.AddArg(v1) 13962 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13963 
v2.AddArg(y) 13964 v0.AddArg(v2) 13965 v.AddArg(v0) 13966 v3 := b.NewValue0(v.Pos, OpConst64, t) 13967 v3.AuxInt = 0 13968 v.AddArg(v3) 13969 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 13970 v4.AuxInt = 64 13971 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13972 v5.AddArg(y) 13973 v4.AddArg(v5) 13974 v.AddArg(v4) 13975 return true 13976 } 13977 } 13978 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 13979 b := v.Block 13980 _ = b 13981 types := &b.Func.Config.Types 13982 _ = types 13983 // match: (Rsh32Ux64 x (MOVDconst [c])) 13984 // cond: uint64(c) < 32 13985 // result: (SRLconst (ZeroExt32to64 x) [c]) 13986 for { 13987 x := v.Args[0] 13988 v_1 := v.Args[1] 13989 if v_1.Op != OpARM64MOVDconst { 13990 break 13991 } 13992 c := v_1.AuxInt 13993 if !(uint64(c) < 32) { 13994 break 13995 } 13996 v.reset(OpARM64SRLconst) 13997 v.AuxInt = c 13998 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 13999 v0.AddArg(x) 14000 v.AddArg(v0) 14001 return true 14002 } 14003 // match: (Rsh32Ux64 _ (MOVDconst [c])) 14004 // cond: uint64(c) >= 32 14005 // result: (MOVDconst [0]) 14006 for { 14007 v_1 := v.Args[1] 14008 if v_1.Op != OpARM64MOVDconst { 14009 break 14010 } 14011 c := v_1.AuxInt 14012 if !(uint64(c) >= 32) { 14013 break 14014 } 14015 v.reset(OpARM64MOVDconst) 14016 v.AuxInt = 0 14017 return true 14018 } 14019 // match: (Rsh32Ux64 <t> x y) 14020 // cond: 14021 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 14022 for { 14023 t := v.Type 14024 x := v.Args[0] 14025 y := v.Args[1] 14026 v.reset(OpARM64CSELULT) 14027 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14028 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14029 v1.AddArg(x) 14030 v0.AddArg(v1) 14031 v0.AddArg(y) 14032 v.AddArg(v0) 14033 v2 := b.NewValue0(v.Pos, OpConst64, t) 14034 v2.AuxInt = 0 14035 v.AddArg(v2) 14036 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14037 v3.AuxInt = 64 14038 v3.AddArg(y) 14039 v.AddArg(v3) 14040 return 
true 14041 } 14042 } 14043 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 14044 b := v.Block 14045 _ = b 14046 types := &b.Func.Config.Types 14047 _ = types 14048 // match: (Rsh32Ux8 <t> x y) 14049 // cond: 14050 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14051 for { 14052 t := v.Type 14053 x := v.Args[0] 14054 y := v.Args[1] 14055 v.reset(OpARM64CSELULT) 14056 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14057 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14058 v1.AddArg(x) 14059 v0.AddArg(v1) 14060 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14061 v2.AddArg(y) 14062 v0.AddArg(v2) 14063 v.AddArg(v0) 14064 v3 := b.NewValue0(v.Pos, OpConst64, t) 14065 v3.AuxInt = 0 14066 v.AddArg(v3) 14067 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14068 v4.AuxInt = 64 14069 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14070 v5.AddArg(y) 14071 v4.AddArg(v5) 14072 v.AddArg(v4) 14073 return true 14074 } 14075 } 14076 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 14077 b := v.Block 14078 _ = b 14079 types := &b.Func.Config.Types 14080 _ = types 14081 // match: (Rsh32x16 x y) 14082 // cond: 14083 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14084 for { 14085 x := v.Args[0] 14086 y := v.Args[1] 14087 v.reset(OpARM64SRA) 14088 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14089 v0.AddArg(x) 14090 v.AddArg(v0) 14091 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14092 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14093 v2.AddArg(y) 14094 v1.AddArg(v2) 14095 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14096 v3.AuxInt = 63 14097 v1.AddArg(v3) 14098 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14099 v4.AuxInt = 64 14100 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14101 v5.AddArg(y) 14102 v4.AddArg(v5) 14103 v1.AddArg(v4) 14104 v.AddArg(v1) 14105 
return true 14106 } 14107 } 14108 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 14109 b := v.Block 14110 _ = b 14111 types := &b.Func.Config.Types 14112 _ = types 14113 // match: (Rsh32x32 x y) 14114 // cond: 14115 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14116 for { 14117 x := v.Args[0] 14118 y := v.Args[1] 14119 v.reset(OpARM64SRA) 14120 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14121 v0.AddArg(x) 14122 v.AddArg(v0) 14123 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14124 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14125 v2.AddArg(y) 14126 v1.AddArg(v2) 14127 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14128 v3.AuxInt = 63 14129 v1.AddArg(v3) 14130 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14131 v4.AuxInt = 64 14132 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14133 v5.AddArg(y) 14134 v4.AddArg(v5) 14135 v1.AddArg(v4) 14136 v.AddArg(v1) 14137 return true 14138 } 14139 } 14140 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 14141 b := v.Block 14142 _ = b 14143 types := &b.Func.Config.Types 14144 _ = types 14145 // match: (Rsh32x64 x (MOVDconst [c])) 14146 // cond: uint64(c) < 32 14147 // result: (SRAconst (SignExt32to64 x) [c]) 14148 for { 14149 x := v.Args[0] 14150 v_1 := v.Args[1] 14151 if v_1.Op != OpARM64MOVDconst { 14152 break 14153 } 14154 c := v_1.AuxInt 14155 if !(uint64(c) < 32) { 14156 break 14157 } 14158 v.reset(OpARM64SRAconst) 14159 v.AuxInt = c 14160 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14161 v0.AddArg(x) 14162 v.AddArg(v0) 14163 return true 14164 } 14165 // match: (Rsh32x64 x (MOVDconst [c])) 14166 // cond: uint64(c) >= 32 14167 // result: (SRAconst (SignExt32to64 x) [63]) 14168 for { 14169 x := v.Args[0] 14170 v_1 := v.Args[1] 14171 if v_1.Op != OpARM64MOVDconst { 14172 break 14173 } 14174 c := v_1.AuxInt 14175 if !(uint64(c) >= 32) { 14176 break 14177 } 14178 
v.reset(OpARM64SRAconst) 14179 v.AuxInt = 63 14180 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14181 v0.AddArg(x) 14182 v.AddArg(v0) 14183 return true 14184 } 14185 // match: (Rsh32x64 x y) 14186 // cond: 14187 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 14188 for { 14189 x := v.Args[0] 14190 y := v.Args[1] 14191 v.reset(OpARM64SRA) 14192 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14193 v0.AddArg(x) 14194 v.AddArg(v0) 14195 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14196 v1.AddArg(y) 14197 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14198 v2.AuxInt = 63 14199 v1.AddArg(v2) 14200 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14201 v3.AuxInt = 64 14202 v3.AddArg(y) 14203 v1.AddArg(v3) 14204 v.AddArg(v1) 14205 return true 14206 } 14207 } 14208 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 14209 b := v.Block 14210 _ = b 14211 types := &b.Func.Config.Types 14212 _ = types 14213 // match: (Rsh32x8 x y) 14214 // cond: 14215 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14216 for { 14217 x := v.Args[0] 14218 y := v.Args[1] 14219 v.reset(OpARM64SRA) 14220 v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64) 14221 v0.AddArg(x) 14222 v.AddArg(v0) 14223 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14224 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14225 v2.AddArg(y) 14226 v1.AddArg(v2) 14227 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14228 v3.AuxInt = 63 14229 v1.AddArg(v3) 14230 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14231 v4.AuxInt = 64 14232 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14233 v5.AddArg(y) 14234 v4.AddArg(v5) 14235 v1.AddArg(v4) 14236 v.AddArg(v1) 14237 return true 14238 } 14239 } 14240 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 14241 b := v.Block 14242 _ = b 14243 types := &b.Func.Config.Types 14244 _ = types 14245 // match: 
(Rsh64Ux16 <t> x y) 14246 // cond: 14247 // result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14248 for { 14249 t := v.Type 14250 x := v.Args[0] 14251 y := v.Args[1] 14252 v.reset(OpARM64CSELULT) 14253 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14254 v0.AddArg(x) 14255 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14256 v1.AddArg(y) 14257 v0.AddArg(v1) 14258 v.AddArg(v0) 14259 v2 := b.NewValue0(v.Pos, OpConst64, t) 14260 v2.AuxInt = 0 14261 v.AddArg(v2) 14262 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14263 v3.AuxInt = 64 14264 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14265 v4.AddArg(y) 14266 v3.AddArg(v4) 14267 v.AddArg(v3) 14268 return true 14269 } 14270 } 14271 func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 14272 b := v.Block 14273 _ = b 14274 types := &b.Func.Config.Types 14275 _ = types 14276 // match: (Rsh64Ux32 <t> x y) 14277 // cond: 14278 // result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14279 for { 14280 t := v.Type 14281 x := v.Args[0] 14282 y := v.Args[1] 14283 v.reset(OpARM64CSELULT) 14284 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14285 v0.AddArg(x) 14286 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14287 v1.AddArg(y) 14288 v0.AddArg(v1) 14289 v.AddArg(v0) 14290 v2 := b.NewValue0(v.Pos, OpConst64, t) 14291 v2.AuxInt = 0 14292 v.AddArg(v2) 14293 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14294 v3.AuxInt = 64 14295 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14296 v4.AddArg(y) 14297 v3.AddArg(v4) 14298 v.AddArg(v3) 14299 return true 14300 } 14301 } 14302 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 14303 b := v.Block 14304 _ = b 14305 // match: (Rsh64Ux64 x (MOVDconst [c])) 14306 // cond: uint64(c) < 64 14307 // result: (SRLconst x [c]) 14308 for { 14309 x := v.Args[0] 14310 v_1 := v.Args[1] 14311 if v_1.Op != OpARM64MOVDconst { 14312 break 14313 } 14314 c := v_1.AuxInt 14315 
if !(uint64(c) < 64) { 14316 break 14317 } 14318 v.reset(OpARM64SRLconst) 14319 v.AuxInt = c 14320 v.AddArg(x) 14321 return true 14322 } 14323 // match: (Rsh64Ux64 _ (MOVDconst [c])) 14324 // cond: uint64(c) >= 64 14325 // result: (MOVDconst [0]) 14326 for { 14327 v_1 := v.Args[1] 14328 if v_1.Op != OpARM64MOVDconst { 14329 break 14330 } 14331 c := v_1.AuxInt 14332 if !(uint64(c) >= 64) { 14333 break 14334 } 14335 v.reset(OpARM64MOVDconst) 14336 v.AuxInt = 0 14337 return true 14338 } 14339 // match: (Rsh64Ux64 <t> x y) 14340 // cond: 14341 // result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 14342 for { 14343 t := v.Type 14344 x := v.Args[0] 14345 y := v.Args[1] 14346 v.reset(OpARM64CSELULT) 14347 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14348 v0.AddArg(x) 14349 v0.AddArg(y) 14350 v.AddArg(v0) 14351 v1 := b.NewValue0(v.Pos, OpConst64, t) 14352 v1.AuxInt = 0 14353 v.AddArg(v1) 14354 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14355 v2.AuxInt = 64 14356 v2.AddArg(y) 14357 v.AddArg(v2) 14358 return true 14359 } 14360 } 14361 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool { 14362 b := v.Block 14363 _ = b 14364 types := &b.Func.Config.Types 14365 _ = types 14366 // match: (Rsh64Ux8 <t> x y) 14367 // cond: 14368 // result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14369 for { 14370 t := v.Type 14371 x := v.Args[0] 14372 y := v.Args[1] 14373 v.reset(OpARM64CSELULT) 14374 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14375 v0.AddArg(x) 14376 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14377 v1.AddArg(y) 14378 v0.AddArg(v1) 14379 v.AddArg(v0) 14380 v2 := b.NewValue0(v.Pos, OpConst64, t) 14381 v2.AuxInt = 0 14382 v.AddArg(v2) 14383 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14384 v3.AuxInt = 64 14385 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14386 v4.AddArg(y) 14387 v3.AddArg(v4) 14388 v.AddArg(v3) 14389 return true 14390 } 14391 } 14392 func 
rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 14393 b := v.Block 14394 _ = b 14395 types := &b.Func.Config.Types 14396 _ = types 14397 // match: (Rsh64x16 x y) 14398 // cond: 14399 // result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14400 for { 14401 x := v.Args[0] 14402 y := v.Args[1] 14403 v.reset(OpARM64SRA) 14404 v.AddArg(x) 14405 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14406 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14407 v1.AddArg(y) 14408 v0.AddArg(v1) 14409 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14410 v2.AuxInt = 63 14411 v0.AddArg(v2) 14412 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14413 v3.AuxInt = 64 14414 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14415 v4.AddArg(y) 14416 v3.AddArg(v4) 14417 v0.AddArg(v3) 14418 v.AddArg(v0) 14419 return true 14420 } 14421 } 14422 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 14423 b := v.Block 14424 _ = b 14425 types := &b.Func.Config.Types 14426 _ = types 14427 // match: (Rsh64x32 x y) 14428 // cond: 14429 // result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14430 for { 14431 x := v.Args[0] 14432 y := v.Args[1] 14433 v.reset(OpARM64SRA) 14434 v.AddArg(x) 14435 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14436 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14437 v1.AddArg(y) 14438 v0.AddArg(v1) 14439 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14440 v2.AuxInt = 63 14441 v0.AddArg(v2) 14442 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14443 v3.AuxInt = 64 14444 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14445 v4.AddArg(y) 14446 v3.AddArg(v4) 14447 v0.AddArg(v3) 14448 v.AddArg(v0) 14449 return true 14450 } 14451 } 14452 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 14453 b := v.Block 14454 _ = b 14455 // match: (Rsh64x64 x (MOVDconst [c])) 14456 // cond: uint64(c) < 64 14457 // result: (SRAconst x 
[c]) 14458 for { 14459 x := v.Args[0] 14460 v_1 := v.Args[1] 14461 if v_1.Op != OpARM64MOVDconst { 14462 break 14463 } 14464 c := v_1.AuxInt 14465 if !(uint64(c) < 64) { 14466 break 14467 } 14468 v.reset(OpARM64SRAconst) 14469 v.AuxInt = c 14470 v.AddArg(x) 14471 return true 14472 } 14473 // match: (Rsh64x64 x (MOVDconst [c])) 14474 // cond: uint64(c) >= 64 14475 // result: (SRAconst x [63]) 14476 for { 14477 x := v.Args[0] 14478 v_1 := v.Args[1] 14479 if v_1.Op != OpARM64MOVDconst { 14480 break 14481 } 14482 c := v_1.AuxInt 14483 if !(uint64(c) >= 64) { 14484 break 14485 } 14486 v.reset(OpARM64SRAconst) 14487 v.AuxInt = 63 14488 v.AddArg(x) 14489 return true 14490 } 14491 // match: (Rsh64x64 x y) 14492 // cond: 14493 // result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 14494 for { 14495 x := v.Args[0] 14496 y := v.Args[1] 14497 v.reset(OpARM64SRA) 14498 v.AddArg(x) 14499 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14500 v0.AddArg(y) 14501 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 14502 v1.AuxInt = 63 14503 v0.AddArg(v1) 14504 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14505 v2.AuxInt = 64 14506 v2.AddArg(y) 14507 v0.AddArg(v2) 14508 v.AddArg(v0) 14509 return true 14510 } 14511 } 14512 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 14513 b := v.Block 14514 _ = b 14515 types := &b.Func.Config.Types 14516 _ = types 14517 // match: (Rsh64x8 x y) 14518 // cond: 14519 // result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14520 for { 14521 x := v.Args[0] 14522 y := v.Args[1] 14523 v.reset(OpARM64SRA) 14524 v.AddArg(x) 14525 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14526 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14527 v1.AddArg(y) 14528 v0.AddArg(v1) 14529 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 14530 v2.AuxInt = 63 14531 v0.AddArg(v2) 14532 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14533 v3.AuxInt = 64 14534 v4 := 
b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14535 v4.AddArg(y) 14536 v3.AddArg(v4) 14537 v0.AddArg(v3) 14538 v.AddArg(v0) 14539 return true 14540 } 14541 } 14542 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 14543 b := v.Block 14544 _ = b 14545 types := &b.Func.Config.Types 14546 _ = types 14547 // match: (Rsh8Ux16 <t> x y) 14548 // cond: 14549 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14550 for { 14551 t := v.Type 14552 x := v.Args[0] 14553 y := v.Args[1] 14554 v.reset(OpARM64CSELULT) 14555 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14556 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14557 v1.AddArg(x) 14558 v0.AddArg(v1) 14559 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14560 v2.AddArg(y) 14561 v0.AddArg(v2) 14562 v.AddArg(v0) 14563 v3 := b.NewValue0(v.Pos, OpConst64, t) 14564 v3.AuxInt = 0 14565 v.AddArg(v3) 14566 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14567 v4.AuxInt = 64 14568 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14569 v5.AddArg(y) 14570 v4.AddArg(v5) 14571 v.AddArg(v4) 14572 return true 14573 } 14574 } 14575 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 14576 b := v.Block 14577 _ = b 14578 types := &b.Func.Config.Types 14579 _ = types 14580 // match: (Rsh8Ux32 <t> x y) 14581 // cond: 14582 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14583 for { 14584 t := v.Type 14585 x := v.Args[0] 14586 y := v.Args[1] 14587 v.reset(OpARM64CSELULT) 14588 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14589 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14590 v1.AddArg(x) 14591 v0.AddArg(v1) 14592 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14593 v2.AddArg(y) 14594 v0.AddArg(v2) 14595 v.AddArg(v0) 14596 v3 := b.NewValue0(v.Pos, OpConst64, t) 14597 v3.AuxInt = 0 14598 v.AddArg(v3) 14599 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14600 
v4.AuxInt = 64 14601 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64) 14602 v5.AddArg(y) 14603 v4.AddArg(v5) 14604 v.AddArg(v4) 14605 return true 14606 } 14607 } 14608 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 14609 b := v.Block 14610 _ = b 14611 types := &b.Func.Config.Types 14612 _ = types 14613 // match: (Rsh8Ux64 x (MOVDconst [c])) 14614 // cond: uint64(c) < 8 14615 // result: (SRLconst (ZeroExt8to64 x) [c]) 14616 for { 14617 x := v.Args[0] 14618 v_1 := v.Args[1] 14619 if v_1.Op != OpARM64MOVDconst { 14620 break 14621 } 14622 c := v_1.AuxInt 14623 if !(uint64(c) < 8) { 14624 break 14625 } 14626 v.reset(OpARM64SRLconst) 14627 v.AuxInt = c 14628 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14629 v0.AddArg(x) 14630 v.AddArg(v0) 14631 return true 14632 } 14633 // match: (Rsh8Ux64 _ (MOVDconst [c])) 14634 // cond: uint64(c) >= 8 14635 // result: (MOVDconst [0]) 14636 for { 14637 v_1 := v.Args[1] 14638 if v_1.Op != OpARM64MOVDconst { 14639 break 14640 } 14641 c := v_1.AuxInt 14642 if !(uint64(c) >= 8) { 14643 break 14644 } 14645 v.reset(OpARM64MOVDconst) 14646 v.AuxInt = 0 14647 return true 14648 } 14649 // match: (Rsh8Ux64 <t> x y) 14650 // cond: 14651 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 14652 for { 14653 t := v.Type 14654 x := v.Args[0] 14655 y := v.Args[1] 14656 v.reset(OpARM64CSELULT) 14657 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14658 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14659 v1.AddArg(x) 14660 v0.AddArg(v1) 14661 v0.AddArg(y) 14662 v.AddArg(v0) 14663 v2 := b.NewValue0(v.Pos, OpConst64, t) 14664 v2.AuxInt = 0 14665 v.AddArg(v2) 14666 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14667 v3.AuxInt = 64 14668 v3.AddArg(y) 14669 v.AddArg(v3) 14670 return true 14671 } 14672 } 14673 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 14674 b := v.Block 14675 _ = b 14676 types := &b.Func.Config.Types 14677 _ = types 14678 // match: (Rsh8Ux8 <t> x y) 14679 // cond: 
14680 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14681 for { 14682 t := v.Type 14683 x := v.Args[0] 14684 y := v.Args[1] 14685 v.reset(OpARM64CSELULT) 14686 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14687 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14688 v1.AddArg(x) 14689 v0.AddArg(v1) 14690 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14691 v2.AddArg(y) 14692 v0.AddArg(v2) 14693 v.AddArg(v0) 14694 v3 := b.NewValue0(v.Pos, OpConst64, t) 14695 v3.AuxInt = 0 14696 v.AddArg(v3) 14697 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14698 v4.AuxInt = 64 14699 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64) 14700 v5.AddArg(y) 14701 v4.AddArg(v5) 14702 v.AddArg(v4) 14703 return true 14704 } 14705 } 14706 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 14707 b := v.Block 14708 _ = b 14709 types := &b.Func.Config.Types 14710 _ = types 14711 // match: (Rsh8x16 x y) 14712 // cond: 14713 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14714 for { 14715 x := v.Args[0] 14716 y := v.Args[1] 14717 v.reset(OpARM64SRA) 14718 v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64) 14719 v0.AddArg(x) 14720 v.AddArg(v0) 14721 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14722 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14723 v2.AddArg(y) 14724 v1.AddArg(v2) 14725 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 14726 v3.AuxInt = 63 14727 v1.AddArg(v3) 14728 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags) 14729 v4.AuxInt = 64 14730 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64) 14731 v5.AddArg(y) 14732 v4.AddArg(v5) 14733 v1.AddArg(v4) 14734 v.AddArg(v1) 14735 return true 14736 } 14737 } 14738 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 14739 b := v.Block 14740 _ = b 14741 types := &b.Func.Config.Types 14742 _ = types 14743 // match: (Rsh8x32 x y) 14744 // cond: 14745 
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpRsh8x64_0 lowers Rsh8x64: constant counts < 8 become a
// single SRAconst, constant counts >= 8 become SRAconst by 63 (sign fill),
// and variable counts clamp to 63 via CSELULT.
func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x64 x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SRAconst (SignExt8to64 x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (SRAconst (SignExt8to64 x) [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpRsh8x8_0 lowers Rsh8x8; same clamp-to-63 scheme with an
// 8-bit count.
func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x8 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpSignExt16to32_0 lowers SignExt16to32 to MOVHreg.
func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt16to64_0 lowers SignExt16to64 to MOVHreg.
func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt32to64_0 lowers SignExt32to64 to MOVWreg.
func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to16_0 lowers SignExt8to16 to MOVBreg.
func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to32_0 lowers SignExt8to32 to MOVBreg.
func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to64_0 lowers SignExt8to64 to MOVBreg.
func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSlicemask_0 lowers Slicemask to an arithmetic shift of
// the negated input by 63, yielding all-ones for x > 0 and zero for x == 0.
func rewriteValueARM64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpSqrt_0 lowers Sqrt to the FSQRTD instruction.
func rewriteValueARM64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (FSQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FSQRTD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpStaticCall_0 lowers StaticCall to CALLstatic, carrying
// the argument width and call target through unchanged.
func rewriteValueARM64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpARM64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpStore_0 lowers the generic Store to the sized ARM64
// store: MOVB/MOVH/MOVW/MOVDstore for 1/2/4/8-byte integers and
// FMOVS/FMOVDstore for 4/8-byte floats, selected by t.(Type).Size() and the
// value's float-ness.
func rewriteValueARM64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (FMOVSstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpSub16_0 lowers Sub16 to SUB.
func rewriteValueARM64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32_0 lowers Sub32 to SUB.
func rewriteValueARM64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32F_0 lowers Sub32F to FSUBS.
func rewriteValueARM64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64_0 lowers Sub64 to SUB.
func rewriteValueARM64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64F_0 lowers Sub64F to FSUBD.
func rewriteValueARM64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub8_0 lowers Sub8 to SUB.
func rewriteValueARM64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSubPtr_0 lowers SubPtr to SUB.
func rewriteValueARM64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpTrunc16to8_0: truncation is a no-op on ARM64 (the value
// is already in a full register), so it rewrites to a Copy.
func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to16_0: truncation is a no-op; rewrites to Copy.
func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to8_0: truncation is a no-op; rewrites to Copy.
func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to16_0: truncation is a no-op; rewrites to Copy.
func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to32_0: truncation is a no-op; rewrites to Copy.
func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to8_0: truncation is a no-op; rewrites to Copy.
func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpXor16_0 lowers Xor16 to XOR.
func rewriteValueARM64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor32_0 lowers Xor32 to XOR.
func rewriteValueARM64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor64_0 lowers Xor64 to XOR.
func rewriteValueARM64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor8_0 lowers Xor8 to XOR.
func rewriteValueARM64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpZero_0 lowers small fixed-size Zero ops to sequences of
// zero-constant stores (sizes 0-8 and the odd sizes 3, 5, 6, 7, 12 in this
// half; larger sizes fall through to rewriteValueARM64_OpZero_10).
func rewriteValueARM64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] ptr mem)
	// cond:
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [5] ptr mem)
	// cond:
	// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// cond:
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// cond:
	// result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// cond:
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero_10 handles larger Zero sizes: 16 and 24 bytes as
// paired MOVDstores, unaligned tails via a recursive Zero split, mid-size
// multiples of 8 via DUFFZERO (unless disabled), and everything bigger via
// the LoweredZero loop.
func rewriteValueARM64_OpZero_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	types := &b.Func.Config.Types
	_ = types
	// match: (Zero [16] ptr mem)
	// cond:
	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [24] ptr mem)
	// cond:
	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, types.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 != 0 && s > 8
	// result: (Zero [s%8] (OffPtr <ptr.Type> ptr [s-s%8]) (Zero [s-s%8] ptr mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, TypeMem)
		v1.AuxInt = s - s%8
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (128 - int64(s/8))] ptr mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		v.AuxInt = 4 * (128 - int64(s/8))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 == 0 && (s > 8*128 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-8] ptr) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && (s > 8*128 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpZeroExt16to32_0 lowers ZeroExt16to32 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt16to64_0 lowers ZeroExt16to64 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt32to64_0 lowers ZeroExt32to64 to MOVWUreg.
func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to16_0 lowers ZeroExt8to16 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to32_0 lowers ZeroExt8to32 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to64_0 lowers ZeroExt8to64 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteBlockARM64(b
*Block) bool { 15703 config := b.Func.Config 15704 _ = config 15705 fe := b.Func.fe 15706 _ = fe 15707 types := &config.Types 15708 _ = types 15709 switch b.Kind { 15710 case BlockARM64EQ: 15711 // match: (EQ (CMPconst [0] x) yes no) 15712 // cond: 15713 // result: (Z x yes no) 15714 for { 15715 v := b.Control 15716 if v.Op != OpARM64CMPconst { 15717 break 15718 } 15719 if v.AuxInt != 0 { 15720 break 15721 } 15722 x := v.Args[0] 15723 b.Kind = BlockARM64Z 15724 b.SetControl(x) 15725 return true 15726 } 15727 // match: (EQ (CMPWconst [0] x) yes no) 15728 // cond: 15729 // result: (ZW x yes no) 15730 for { 15731 v := b.Control 15732 if v.Op != OpARM64CMPWconst { 15733 break 15734 } 15735 if v.AuxInt != 0 { 15736 break 15737 } 15738 x := v.Args[0] 15739 b.Kind = BlockARM64ZW 15740 b.SetControl(x) 15741 return true 15742 } 15743 // match: (EQ (FlagEQ) yes no) 15744 // cond: 15745 // result: (First nil yes no) 15746 for { 15747 v := b.Control 15748 if v.Op != OpARM64FlagEQ { 15749 break 15750 } 15751 b.Kind = BlockFirst 15752 b.SetControl(nil) 15753 return true 15754 } 15755 // match: (EQ (FlagLT_ULT) yes no) 15756 // cond: 15757 // result: (First nil no yes) 15758 for { 15759 v := b.Control 15760 if v.Op != OpARM64FlagLT_ULT { 15761 break 15762 } 15763 b.Kind = BlockFirst 15764 b.SetControl(nil) 15765 b.swapSuccessors() 15766 return true 15767 } 15768 // match: (EQ (FlagLT_UGT) yes no) 15769 // cond: 15770 // result: (First nil no yes) 15771 for { 15772 v := b.Control 15773 if v.Op != OpARM64FlagLT_UGT { 15774 break 15775 } 15776 b.Kind = BlockFirst 15777 b.SetControl(nil) 15778 b.swapSuccessors() 15779 return true 15780 } 15781 // match: (EQ (FlagGT_ULT) yes no) 15782 // cond: 15783 // result: (First nil no yes) 15784 for { 15785 v := b.Control 15786 if v.Op != OpARM64FlagGT_ULT { 15787 break 15788 } 15789 b.Kind = BlockFirst 15790 b.SetControl(nil) 15791 b.swapSuccessors() 15792 return true 15793 } 15794 // match: (EQ (FlagGT_UGT) yes no) 15795 // cond: 15796 // 
result: (First nil no yes) 15797 for { 15798 v := b.Control 15799 if v.Op != OpARM64FlagGT_UGT { 15800 break 15801 } 15802 b.Kind = BlockFirst 15803 b.SetControl(nil) 15804 b.swapSuccessors() 15805 return true 15806 } 15807 // match: (EQ (InvertFlags cmp) yes no) 15808 // cond: 15809 // result: (EQ cmp yes no) 15810 for { 15811 v := b.Control 15812 if v.Op != OpARM64InvertFlags { 15813 break 15814 } 15815 cmp := v.Args[0] 15816 b.Kind = BlockARM64EQ 15817 b.SetControl(cmp) 15818 return true 15819 } 15820 case BlockARM64GE: 15821 // match: (GE (FlagEQ) yes no) 15822 // cond: 15823 // result: (First nil yes no) 15824 for { 15825 v := b.Control 15826 if v.Op != OpARM64FlagEQ { 15827 break 15828 } 15829 b.Kind = BlockFirst 15830 b.SetControl(nil) 15831 return true 15832 } 15833 // match: (GE (FlagLT_ULT) yes no) 15834 // cond: 15835 // result: (First nil no yes) 15836 for { 15837 v := b.Control 15838 if v.Op != OpARM64FlagLT_ULT { 15839 break 15840 } 15841 b.Kind = BlockFirst 15842 b.SetControl(nil) 15843 b.swapSuccessors() 15844 return true 15845 } 15846 // match: (GE (FlagLT_UGT) yes no) 15847 // cond: 15848 // result: (First nil no yes) 15849 for { 15850 v := b.Control 15851 if v.Op != OpARM64FlagLT_UGT { 15852 break 15853 } 15854 b.Kind = BlockFirst 15855 b.SetControl(nil) 15856 b.swapSuccessors() 15857 return true 15858 } 15859 // match: (GE (FlagGT_ULT) yes no) 15860 // cond: 15861 // result: (First nil yes no) 15862 for { 15863 v := b.Control 15864 if v.Op != OpARM64FlagGT_ULT { 15865 break 15866 } 15867 b.Kind = BlockFirst 15868 b.SetControl(nil) 15869 return true 15870 } 15871 // match: (GE (FlagGT_UGT) yes no) 15872 // cond: 15873 // result: (First nil yes no) 15874 for { 15875 v := b.Control 15876 if v.Op != OpARM64FlagGT_UGT { 15877 break 15878 } 15879 b.Kind = BlockFirst 15880 b.SetControl(nil) 15881 return true 15882 } 15883 // match: (GE (InvertFlags cmp) yes no) 15884 // cond: 15885 // result: (LE cmp yes no) 15886 for { 15887 v := b.Control 15888 if 
v.Op != OpARM64InvertFlags { 15889 break 15890 } 15891 cmp := v.Args[0] 15892 b.Kind = BlockARM64LE 15893 b.SetControl(cmp) 15894 return true 15895 } 15896 case BlockARM64GT: 15897 // match: (GT (FlagEQ) yes no) 15898 // cond: 15899 // result: (First nil no yes) 15900 for { 15901 v := b.Control 15902 if v.Op != OpARM64FlagEQ { 15903 break 15904 } 15905 b.Kind = BlockFirst 15906 b.SetControl(nil) 15907 b.swapSuccessors() 15908 return true 15909 } 15910 // match: (GT (FlagLT_ULT) yes no) 15911 // cond: 15912 // result: (First nil no yes) 15913 for { 15914 v := b.Control 15915 if v.Op != OpARM64FlagLT_ULT { 15916 break 15917 } 15918 b.Kind = BlockFirst 15919 b.SetControl(nil) 15920 b.swapSuccessors() 15921 return true 15922 } 15923 // match: (GT (FlagLT_UGT) yes no) 15924 // cond: 15925 // result: (First nil no yes) 15926 for { 15927 v := b.Control 15928 if v.Op != OpARM64FlagLT_UGT { 15929 break 15930 } 15931 b.Kind = BlockFirst 15932 b.SetControl(nil) 15933 b.swapSuccessors() 15934 return true 15935 } 15936 // match: (GT (FlagGT_ULT) yes no) 15937 // cond: 15938 // result: (First nil yes no) 15939 for { 15940 v := b.Control 15941 if v.Op != OpARM64FlagGT_ULT { 15942 break 15943 } 15944 b.Kind = BlockFirst 15945 b.SetControl(nil) 15946 return true 15947 } 15948 // match: (GT (FlagGT_UGT) yes no) 15949 // cond: 15950 // result: (First nil yes no) 15951 for { 15952 v := b.Control 15953 if v.Op != OpARM64FlagGT_UGT { 15954 break 15955 } 15956 b.Kind = BlockFirst 15957 b.SetControl(nil) 15958 return true 15959 } 15960 // match: (GT (InvertFlags cmp) yes no) 15961 // cond: 15962 // result: (LT cmp yes no) 15963 for { 15964 v := b.Control 15965 if v.Op != OpARM64InvertFlags { 15966 break 15967 } 15968 cmp := v.Args[0] 15969 b.Kind = BlockARM64LT 15970 b.SetControl(cmp) 15971 return true 15972 } 15973 case BlockIf: 15974 // match: (If (Equal cc) yes no) 15975 // cond: 15976 // result: (EQ cc yes no) 15977 for { 15978 v := b.Control 15979 if v.Op != OpARM64Equal { 15980 
break 15981 } 15982 cc := v.Args[0] 15983 b.Kind = BlockARM64EQ 15984 b.SetControl(cc) 15985 return true 15986 } 15987 // match: (If (NotEqual cc) yes no) 15988 // cond: 15989 // result: (NE cc yes no) 15990 for { 15991 v := b.Control 15992 if v.Op != OpARM64NotEqual { 15993 break 15994 } 15995 cc := v.Args[0] 15996 b.Kind = BlockARM64NE 15997 b.SetControl(cc) 15998 return true 15999 } 16000 // match: (If (LessThan cc) yes no) 16001 // cond: 16002 // result: (LT cc yes no) 16003 for { 16004 v := b.Control 16005 if v.Op != OpARM64LessThan { 16006 break 16007 } 16008 cc := v.Args[0] 16009 b.Kind = BlockARM64LT 16010 b.SetControl(cc) 16011 return true 16012 } 16013 // match: (If (LessThanU cc) yes no) 16014 // cond: 16015 // result: (ULT cc yes no) 16016 for { 16017 v := b.Control 16018 if v.Op != OpARM64LessThanU { 16019 break 16020 } 16021 cc := v.Args[0] 16022 b.Kind = BlockARM64ULT 16023 b.SetControl(cc) 16024 return true 16025 } 16026 // match: (If (LessEqual cc) yes no) 16027 // cond: 16028 // result: (LE cc yes no) 16029 for { 16030 v := b.Control 16031 if v.Op != OpARM64LessEqual { 16032 break 16033 } 16034 cc := v.Args[0] 16035 b.Kind = BlockARM64LE 16036 b.SetControl(cc) 16037 return true 16038 } 16039 // match: (If (LessEqualU cc) yes no) 16040 // cond: 16041 // result: (ULE cc yes no) 16042 for { 16043 v := b.Control 16044 if v.Op != OpARM64LessEqualU { 16045 break 16046 } 16047 cc := v.Args[0] 16048 b.Kind = BlockARM64ULE 16049 b.SetControl(cc) 16050 return true 16051 } 16052 // match: (If (GreaterThan cc) yes no) 16053 // cond: 16054 // result: (GT cc yes no) 16055 for { 16056 v := b.Control 16057 if v.Op != OpARM64GreaterThan { 16058 break 16059 } 16060 cc := v.Args[0] 16061 b.Kind = BlockARM64GT 16062 b.SetControl(cc) 16063 return true 16064 } 16065 // match: (If (GreaterThanU cc) yes no) 16066 // cond: 16067 // result: (UGT cc yes no) 16068 for { 16069 v := b.Control 16070 if v.Op != OpARM64GreaterThanU { 16071 break 16072 } 16073 cc := v.Args[0] 
16074 b.Kind = BlockARM64UGT 16075 b.SetControl(cc) 16076 return true 16077 } 16078 // match: (If (GreaterEqual cc) yes no) 16079 // cond: 16080 // result: (GE cc yes no) 16081 for { 16082 v := b.Control 16083 if v.Op != OpARM64GreaterEqual { 16084 break 16085 } 16086 cc := v.Args[0] 16087 b.Kind = BlockARM64GE 16088 b.SetControl(cc) 16089 return true 16090 } 16091 // match: (If (GreaterEqualU cc) yes no) 16092 // cond: 16093 // result: (UGE cc yes no) 16094 for { 16095 v := b.Control 16096 if v.Op != OpARM64GreaterEqualU { 16097 break 16098 } 16099 cc := v.Args[0] 16100 b.Kind = BlockARM64UGE 16101 b.SetControl(cc) 16102 return true 16103 } 16104 // match: (If cond yes no) 16105 // cond: 16106 // result: (NZ cond yes no) 16107 for { 16108 v := b.Control 16109 _ = v 16110 cond := b.Control 16111 b.Kind = BlockARM64NZ 16112 b.SetControl(cond) 16113 return true 16114 } 16115 case BlockARM64LE: 16116 // match: (LE (FlagEQ) yes no) 16117 // cond: 16118 // result: (First nil yes no) 16119 for { 16120 v := b.Control 16121 if v.Op != OpARM64FlagEQ { 16122 break 16123 } 16124 b.Kind = BlockFirst 16125 b.SetControl(nil) 16126 return true 16127 } 16128 // match: (LE (FlagLT_ULT) yes no) 16129 // cond: 16130 // result: (First nil yes no) 16131 for { 16132 v := b.Control 16133 if v.Op != OpARM64FlagLT_ULT { 16134 break 16135 } 16136 b.Kind = BlockFirst 16137 b.SetControl(nil) 16138 return true 16139 } 16140 // match: (LE (FlagLT_UGT) yes no) 16141 // cond: 16142 // result: (First nil yes no) 16143 for { 16144 v := b.Control 16145 if v.Op != OpARM64FlagLT_UGT { 16146 break 16147 } 16148 b.Kind = BlockFirst 16149 b.SetControl(nil) 16150 return true 16151 } 16152 // match: (LE (FlagGT_ULT) yes no) 16153 // cond: 16154 // result: (First nil no yes) 16155 for { 16156 v := b.Control 16157 if v.Op != OpARM64FlagGT_ULT { 16158 break 16159 } 16160 b.Kind = BlockFirst 16161 b.SetControl(nil) 16162 b.swapSuccessors() 16163 return true 16164 } 16165 // match: (LE (FlagGT_UGT) yes no) 
16166 // cond: 16167 // result: (First nil no yes) 16168 for { 16169 v := b.Control 16170 if v.Op != OpARM64FlagGT_UGT { 16171 break 16172 } 16173 b.Kind = BlockFirst 16174 b.SetControl(nil) 16175 b.swapSuccessors() 16176 return true 16177 } 16178 // match: (LE (InvertFlags cmp) yes no) 16179 // cond: 16180 // result: (GE cmp yes no) 16181 for { 16182 v := b.Control 16183 if v.Op != OpARM64InvertFlags { 16184 break 16185 } 16186 cmp := v.Args[0] 16187 b.Kind = BlockARM64GE 16188 b.SetControl(cmp) 16189 return true 16190 } 16191 case BlockARM64LT: 16192 // match: (LT (FlagEQ) yes no) 16193 // cond: 16194 // result: (First nil no yes) 16195 for { 16196 v := b.Control 16197 if v.Op != OpARM64FlagEQ { 16198 break 16199 } 16200 b.Kind = BlockFirst 16201 b.SetControl(nil) 16202 b.swapSuccessors() 16203 return true 16204 } 16205 // match: (LT (FlagLT_ULT) yes no) 16206 // cond: 16207 // result: (First nil yes no) 16208 for { 16209 v := b.Control 16210 if v.Op != OpARM64FlagLT_ULT { 16211 break 16212 } 16213 b.Kind = BlockFirst 16214 b.SetControl(nil) 16215 return true 16216 } 16217 // match: (LT (FlagLT_UGT) yes no) 16218 // cond: 16219 // result: (First nil yes no) 16220 for { 16221 v := b.Control 16222 if v.Op != OpARM64FlagLT_UGT { 16223 break 16224 } 16225 b.Kind = BlockFirst 16226 b.SetControl(nil) 16227 return true 16228 } 16229 // match: (LT (FlagGT_ULT) yes no) 16230 // cond: 16231 // result: (First nil no yes) 16232 for { 16233 v := b.Control 16234 if v.Op != OpARM64FlagGT_ULT { 16235 break 16236 } 16237 b.Kind = BlockFirst 16238 b.SetControl(nil) 16239 b.swapSuccessors() 16240 return true 16241 } 16242 // match: (LT (FlagGT_UGT) yes no) 16243 // cond: 16244 // result: (First nil no yes) 16245 for { 16246 v := b.Control 16247 if v.Op != OpARM64FlagGT_UGT { 16248 break 16249 } 16250 b.Kind = BlockFirst 16251 b.SetControl(nil) 16252 b.swapSuccessors() 16253 return true 16254 } 16255 // match: (LT (InvertFlags cmp) yes no) 16256 // cond: 16257 // result: (GT cmp yes 
no) 16258 for { 16259 v := b.Control 16260 if v.Op != OpARM64InvertFlags { 16261 break 16262 } 16263 cmp := v.Args[0] 16264 b.Kind = BlockARM64GT 16265 b.SetControl(cmp) 16266 return true 16267 } 16268 case BlockARM64NE: 16269 // match: (NE (CMPconst [0] x) yes no) 16270 // cond: 16271 // result: (NZ x yes no) 16272 for { 16273 v := b.Control 16274 if v.Op != OpARM64CMPconst { 16275 break 16276 } 16277 if v.AuxInt != 0 { 16278 break 16279 } 16280 x := v.Args[0] 16281 b.Kind = BlockARM64NZ 16282 b.SetControl(x) 16283 return true 16284 } 16285 // match: (NE (CMPWconst [0] x) yes no) 16286 // cond: 16287 // result: (NZW x yes no) 16288 for { 16289 v := b.Control 16290 if v.Op != OpARM64CMPWconst { 16291 break 16292 } 16293 if v.AuxInt != 0 { 16294 break 16295 } 16296 x := v.Args[0] 16297 b.Kind = BlockARM64NZW 16298 b.SetControl(x) 16299 return true 16300 } 16301 // match: (NE (FlagEQ) yes no) 16302 // cond: 16303 // result: (First nil no yes) 16304 for { 16305 v := b.Control 16306 if v.Op != OpARM64FlagEQ { 16307 break 16308 } 16309 b.Kind = BlockFirst 16310 b.SetControl(nil) 16311 b.swapSuccessors() 16312 return true 16313 } 16314 // match: (NE (FlagLT_ULT) yes no) 16315 // cond: 16316 // result: (First nil yes no) 16317 for { 16318 v := b.Control 16319 if v.Op != OpARM64FlagLT_ULT { 16320 break 16321 } 16322 b.Kind = BlockFirst 16323 b.SetControl(nil) 16324 return true 16325 } 16326 // match: (NE (FlagLT_UGT) yes no) 16327 // cond: 16328 // result: (First nil yes no) 16329 for { 16330 v := b.Control 16331 if v.Op != OpARM64FlagLT_UGT { 16332 break 16333 } 16334 b.Kind = BlockFirst 16335 b.SetControl(nil) 16336 return true 16337 } 16338 // match: (NE (FlagGT_ULT) yes no) 16339 // cond: 16340 // result: (First nil yes no) 16341 for { 16342 v := b.Control 16343 if v.Op != OpARM64FlagGT_ULT { 16344 break 16345 } 16346 b.Kind = BlockFirst 16347 b.SetControl(nil) 16348 return true 16349 } 16350 // match: (NE (FlagGT_UGT) yes no) 16351 // cond: 16352 // result: (First nil 
yes no) 16353 for { 16354 v := b.Control 16355 if v.Op != OpARM64FlagGT_UGT { 16356 break 16357 } 16358 b.Kind = BlockFirst 16359 b.SetControl(nil) 16360 return true 16361 } 16362 // match: (NE (InvertFlags cmp) yes no) 16363 // cond: 16364 // result: (NE cmp yes no) 16365 for { 16366 v := b.Control 16367 if v.Op != OpARM64InvertFlags { 16368 break 16369 } 16370 cmp := v.Args[0] 16371 b.Kind = BlockARM64NE 16372 b.SetControl(cmp) 16373 return true 16374 } 16375 case BlockARM64NZ: 16376 // match: (NZ (Equal cc) yes no) 16377 // cond: 16378 // result: (EQ cc yes no) 16379 for { 16380 v := b.Control 16381 if v.Op != OpARM64Equal { 16382 break 16383 } 16384 cc := v.Args[0] 16385 b.Kind = BlockARM64EQ 16386 b.SetControl(cc) 16387 return true 16388 } 16389 // match: (NZ (NotEqual cc) yes no) 16390 // cond: 16391 // result: (NE cc yes no) 16392 for { 16393 v := b.Control 16394 if v.Op != OpARM64NotEqual { 16395 break 16396 } 16397 cc := v.Args[0] 16398 b.Kind = BlockARM64NE 16399 b.SetControl(cc) 16400 return true 16401 } 16402 // match: (NZ (LessThan cc) yes no) 16403 // cond: 16404 // result: (LT cc yes no) 16405 for { 16406 v := b.Control 16407 if v.Op != OpARM64LessThan { 16408 break 16409 } 16410 cc := v.Args[0] 16411 b.Kind = BlockARM64LT 16412 b.SetControl(cc) 16413 return true 16414 } 16415 // match: (NZ (LessThanU cc) yes no) 16416 // cond: 16417 // result: (ULT cc yes no) 16418 for { 16419 v := b.Control 16420 if v.Op != OpARM64LessThanU { 16421 break 16422 } 16423 cc := v.Args[0] 16424 b.Kind = BlockARM64ULT 16425 b.SetControl(cc) 16426 return true 16427 } 16428 // match: (NZ (LessEqual cc) yes no) 16429 // cond: 16430 // result: (LE cc yes no) 16431 for { 16432 v := b.Control 16433 if v.Op != OpARM64LessEqual { 16434 break 16435 } 16436 cc := v.Args[0] 16437 b.Kind = BlockARM64LE 16438 b.SetControl(cc) 16439 return true 16440 } 16441 // match: (NZ (LessEqualU cc) yes no) 16442 // cond: 16443 // result: (ULE cc yes no) 16444 for { 16445 v := b.Control 16446 if 
v.Op != OpARM64LessEqualU { 16447 break 16448 } 16449 cc := v.Args[0] 16450 b.Kind = BlockARM64ULE 16451 b.SetControl(cc) 16452 return true 16453 } 16454 // match: (NZ (GreaterThan cc) yes no) 16455 // cond: 16456 // result: (GT cc yes no) 16457 for { 16458 v := b.Control 16459 if v.Op != OpARM64GreaterThan { 16460 break 16461 } 16462 cc := v.Args[0] 16463 b.Kind = BlockARM64GT 16464 b.SetControl(cc) 16465 return true 16466 } 16467 // match: (NZ (GreaterThanU cc) yes no) 16468 // cond: 16469 // result: (UGT cc yes no) 16470 for { 16471 v := b.Control 16472 if v.Op != OpARM64GreaterThanU { 16473 break 16474 } 16475 cc := v.Args[0] 16476 b.Kind = BlockARM64UGT 16477 b.SetControl(cc) 16478 return true 16479 } 16480 // match: (NZ (GreaterEqual cc) yes no) 16481 // cond: 16482 // result: (GE cc yes no) 16483 for { 16484 v := b.Control 16485 if v.Op != OpARM64GreaterEqual { 16486 break 16487 } 16488 cc := v.Args[0] 16489 b.Kind = BlockARM64GE 16490 b.SetControl(cc) 16491 return true 16492 } 16493 // match: (NZ (GreaterEqualU cc) yes no) 16494 // cond: 16495 // result: (UGE cc yes no) 16496 for { 16497 v := b.Control 16498 if v.Op != OpARM64GreaterEqualU { 16499 break 16500 } 16501 cc := v.Args[0] 16502 b.Kind = BlockARM64UGE 16503 b.SetControl(cc) 16504 return true 16505 } 16506 // match: (NZ (MOVDconst [0]) yes no) 16507 // cond: 16508 // result: (First nil no yes) 16509 for { 16510 v := b.Control 16511 if v.Op != OpARM64MOVDconst { 16512 break 16513 } 16514 if v.AuxInt != 0 { 16515 break 16516 } 16517 b.Kind = BlockFirst 16518 b.SetControl(nil) 16519 b.swapSuccessors() 16520 return true 16521 } 16522 // match: (NZ (MOVDconst [c]) yes no) 16523 // cond: c != 0 16524 // result: (First nil yes no) 16525 for { 16526 v := b.Control 16527 if v.Op != OpARM64MOVDconst { 16528 break 16529 } 16530 c := v.AuxInt 16531 if !(c != 0) { 16532 break 16533 } 16534 b.Kind = BlockFirst 16535 b.SetControl(nil) 16536 return true 16537 } 16538 case BlockARM64NZW: 16539 // match: (NZW 
(MOVDconst [c]) yes no) 16540 // cond: int32(c) == 0 16541 // result: (First nil no yes) 16542 for { 16543 v := b.Control 16544 if v.Op != OpARM64MOVDconst { 16545 break 16546 } 16547 c := v.AuxInt 16548 if !(int32(c) == 0) { 16549 break 16550 } 16551 b.Kind = BlockFirst 16552 b.SetControl(nil) 16553 b.swapSuccessors() 16554 return true 16555 } 16556 // match: (NZW (MOVDconst [c]) yes no) 16557 // cond: int32(c) != 0 16558 // result: (First nil yes no) 16559 for { 16560 v := b.Control 16561 if v.Op != OpARM64MOVDconst { 16562 break 16563 } 16564 c := v.AuxInt 16565 if !(int32(c) != 0) { 16566 break 16567 } 16568 b.Kind = BlockFirst 16569 b.SetControl(nil) 16570 return true 16571 } 16572 case BlockARM64UGE: 16573 // match: (UGE (FlagEQ) yes no) 16574 // cond: 16575 // result: (First nil yes no) 16576 for { 16577 v := b.Control 16578 if v.Op != OpARM64FlagEQ { 16579 break 16580 } 16581 b.Kind = BlockFirst 16582 b.SetControl(nil) 16583 return true 16584 } 16585 // match: (UGE (FlagLT_ULT) yes no) 16586 // cond: 16587 // result: (First nil no yes) 16588 for { 16589 v := b.Control 16590 if v.Op != OpARM64FlagLT_ULT { 16591 break 16592 } 16593 b.Kind = BlockFirst 16594 b.SetControl(nil) 16595 b.swapSuccessors() 16596 return true 16597 } 16598 // match: (UGE (FlagLT_UGT) yes no) 16599 // cond: 16600 // result: (First nil yes no) 16601 for { 16602 v := b.Control 16603 if v.Op != OpARM64FlagLT_UGT { 16604 break 16605 } 16606 b.Kind = BlockFirst 16607 b.SetControl(nil) 16608 return true 16609 } 16610 // match: (UGE (FlagGT_ULT) yes no) 16611 // cond: 16612 // result: (First nil no yes) 16613 for { 16614 v := b.Control 16615 if v.Op != OpARM64FlagGT_ULT { 16616 break 16617 } 16618 b.Kind = BlockFirst 16619 b.SetControl(nil) 16620 b.swapSuccessors() 16621 return true 16622 } 16623 // match: (UGE (FlagGT_UGT) yes no) 16624 // cond: 16625 // result: (First nil yes no) 16626 for { 16627 v := b.Control 16628 if v.Op != OpARM64FlagGT_UGT { 16629 break 16630 } 16631 b.Kind = 
BlockFirst 16632 b.SetControl(nil) 16633 return true 16634 } 16635 // match: (UGE (InvertFlags cmp) yes no) 16636 // cond: 16637 // result: (ULE cmp yes no) 16638 for { 16639 v := b.Control 16640 if v.Op != OpARM64InvertFlags { 16641 break 16642 } 16643 cmp := v.Args[0] 16644 b.Kind = BlockARM64ULE 16645 b.SetControl(cmp) 16646 return true 16647 } 16648 case BlockARM64UGT: 16649 // match: (UGT (FlagEQ) yes no) 16650 // cond: 16651 // result: (First nil no yes) 16652 for { 16653 v := b.Control 16654 if v.Op != OpARM64FlagEQ { 16655 break 16656 } 16657 b.Kind = BlockFirst 16658 b.SetControl(nil) 16659 b.swapSuccessors() 16660 return true 16661 } 16662 // match: (UGT (FlagLT_ULT) yes no) 16663 // cond: 16664 // result: (First nil no yes) 16665 for { 16666 v := b.Control 16667 if v.Op != OpARM64FlagLT_ULT { 16668 break 16669 } 16670 b.Kind = BlockFirst 16671 b.SetControl(nil) 16672 b.swapSuccessors() 16673 return true 16674 } 16675 // match: (UGT (FlagLT_UGT) yes no) 16676 // cond: 16677 // result: (First nil yes no) 16678 for { 16679 v := b.Control 16680 if v.Op != OpARM64FlagLT_UGT { 16681 break 16682 } 16683 b.Kind = BlockFirst 16684 b.SetControl(nil) 16685 return true 16686 } 16687 // match: (UGT (FlagGT_ULT) yes no) 16688 // cond: 16689 // result: (First nil no yes) 16690 for { 16691 v := b.Control 16692 if v.Op != OpARM64FlagGT_ULT { 16693 break 16694 } 16695 b.Kind = BlockFirst 16696 b.SetControl(nil) 16697 b.swapSuccessors() 16698 return true 16699 } 16700 // match: (UGT (FlagGT_UGT) yes no) 16701 // cond: 16702 // result: (First nil yes no) 16703 for { 16704 v := b.Control 16705 if v.Op != OpARM64FlagGT_UGT { 16706 break 16707 } 16708 b.Kind = BlockFirst 16709 b.SetControl(nil) 16710 return true 16711 } 16712 // match: (UGT (InvertFlags cmp) yes no) 16713 // cond: 16714 // result: (ULT cmp yes no) 16715 for { 16716 v := b.Control 16717 if v.Op != OpARM64InvertFlags { 16718 break 16719 } 16720 cmp := v.Args[0] 16721 b.Kind = BlockARM64ULT 16722 
b.SetControl(cmp) 16723 return true 16724 } 16725 case BlockARM64ULE: 16726 // match: (ULE (FlagEQ) yes no) 16727 // cond: 16728 // result: (First nil yes no) 16729 for { 16730 v := b.Control 16731 if v.Op != OpARM64FlagEQ { 16732 break 16733 } 16734 b.Kind = BlockFirst 16735 b.SetControl(nil) 16736 return true 16737 } 16738 // match: (ULE (FlagLT_ULT) yes no) 16739 // cond: 16740 // result: (First nil yes no) 16741 for { 16742 v := b.Control 16743 if v.Op != OpARM64FlagLT_ULT { 16744 break 16745 } 16746 b.Kind = BlockFirst 16747 b.SetControl(nil) 16748 return true 16749 } 16750 // match: (ULE (FlagLT_UGT) yes no) 16751 // cond: 16752 // result: (First nil no yes) 16753 for { 16754 v := b.Control 16755 if v.Op != OpARM64FlagLT_UGT { 16756 break 16757 } 16758 b.Kind = BlockFirst 16759 b.SetControl(nil) 16760 b.swapSuccessors() 16761 return true 16762 } 16763 // match: (ULE (FlagGT_ULT) yes no) 16764 // cond: 16765 // result: (First nil yes no) 16766 for { 16767 v := b.Control 16768 if v.Op != OpARM64FlagGT_ULT { 16769 break 16770 } 16771 b.Kind = BlockFirst 16772 b.SetControl(nil) 16773 return true 16774 } 16775 // match: (ULE (FlagGT_UGT) yes no) 16776 // cond: 16777 // result: (First nil no yes) 16778 for { 16779 v := b.Control 16780 if v.Op != OpARM64FlagGT_UGT { 16781 break 16782 } 16783 b.Kind = BlockFirst 16784 b.SetControl(nil) 16785 b.swapSuccessors() 16786 return true 16787 } 16788 // match: (ULE (InvertFlags cmp) yes no) 16789 // cond: 16790 // result: (UGE cmp yes no) 16791 for { 16792 v := b.Control 16793 if v.Op != OpARM64InvertFlags { 16794 break 16795 } 16796 cmp := v.Args[0] 16797 b.Kind = BlockARM64UGE 16798 b.SetControl(cmp) 16799 return true 16800 } 16801 case BlockARM64ULT: 16802 // match: (ULT (FlagEQ) yes no) 16803 // cond: 16804 // result: (First nil no yes) 16805 for { 16806 v := b.Control 16807 if v.Op != OpARM64FlagEQ { 16808 break 16809 } 16810 b.Kind = BlockFirst 16811 b.SetControl(nil) 16812 b.swapSuccessors() 16813 return true 16814 } 
16815 // match: (ULT (FlagLT_ULT) yes no) 16816 // cond: 16817 // result: (First nil yes no) 16818 for { 16819 v := b.Control 16820 if v.Op != OpARM64FlagLT_ULT { 16821 break 16822 } 16823 b.Kind = BlockFirst 16824 b.SetControl(nil) 16825 return true 16826 } 16827 // match: (ULT (FlagLT_UGT) yes no) 16828 // cond: 16829 // result: (First nil no yes) 16830 for { 16831 v := b.Control 16832 if v.Op != OpARM64FlagLT_UGT { 16833 break 16834 } 16835 b.Kind = BlockFirst 16836 b.SetControl(nil) 16837 b.swapSuccessors() 16838 return true 16839 } 16840 // match: (ULT (FlagGT_ULT) yes no) 16841 // cond: 16842 // result: (First nil yes no) 16843 for { 16844 v := b.Control 16845 if v.Op != OpARM64FlagGT_ULT { 16846 break 16847 } 16848 b.Kind = BlockFirst 16849 b.SetControl(nil) 16850 return true 16851 } 16852 // match: (ULT (FlagGT_UGT) yes no) 16853 // cond: 16854 // result: (First nil no yes) 16855 for { 16856 v := b.Control 16857 if v.Op != OpARM64FlagGT_UGT { 16858 break 16859 } 16860 b.Kind = BlockFirst 16861 b.SetControl(nil) 16862 b.swapSuccessors() 16863 return true 16864 } 16865 // match: (ULT (InvertFlags cmp) yes no) 16866 // cond: 16867 // result: (UGT cmp yes no) 16868 for { 16869 v := b.Control 16870 if v.Op != OpARM64InvertFlags { 16871 break 16872 } 16873 cmp := v.Args[0] 16874 b.Kind = BlockARM64UGT 16875 b.SetControl(cmp) 16876 return true 16877 } 16878 case BlockARM64Z: 16879 // match: (Z (MOVDconst [0]) yes no) 16880 // cond: 16881 // result: (First nil yes no) 16882 for { 16883 v := b.Control 16884 if v.Op != OpARM64MOVDconst { 16885 break 16886 } 16887 if v.AuxInt != 0 { 16888 break 16889 } 16890 b.Kind = BlockFirst 16891 b.SetControl(nil) 16892 return true 16893 } 16894 // match: (Z (MOVDconst [c]) yes no) 16895 // cond: c != 0 16896 // result: (First nil no yes) 16897 for { 16898 v := b.Control 16899 if v.Op != OpARM64MOVDconst { 16900 break 16901 } 16902 c := v.AuxInt 16903 if !(c != 0) { 16904 break 16905 } 16906 b.Kind = BlockFirst 16907 
b.SetControl(nil) 16908 b.swapSuccessors() 16909 return true 16910 } 16911 case BlockARM64ZW: 16912 // match: (ZW (MOVDconst [c]) yes no) 16913 // cond: int32(c) == 0 16914 // result: (First nil yes no) 16915 for { 16916 v := b.Control 16917 if v.Op != OpARM64MOVDconst { 16918 break 16919 } 16920 c := v.AuxInt 16921 if !(int32(c) == 0) { 16922 break 16923 } 16924 b.Kind = BlockFirst 16925 b.SetControl(nil) 16926 return true 16927 } 16928 // match: (ZW (MOVDconst [c]) yes no) 16929 // cond: int32(c) != 0 16930 // result: (First nil no yes) 16931 for { 16932 v := b.Control 16933 if v.Op != OpARM64MOVDconst { 16934 break 16935 } 16936 c := v.AuxInt 16937 if !(int32(c) != 0) { 16938 break 16939 } 16940 b.Kind = BlockFirst 16941 b.SetControl(nil) 16942 b.swapSuccessors() 16943 return true 16944 } 16945 } 16946 return false 16947 }