github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 import "cmd/internal/obj" 8 import "cmd/internal/objabi" 9 import "cmd/compile/internal/types" 10 11 var _ = math.MinInt8 // in case not otherwise used 12 var _ = obj.ANOP // in case not otherwise used 13 var _ = objabi.GOROOT // in case not otherwise used 14 var _ = types.TypeMem // in case not otherwise used 15 16 func rewriteValueARM64(v *Value) bool { 17 switch v.Op { 18 case OpARM64ADD: 19 return rewriteValueARM64_OpARM64ADD_0(v) 20 case OpARM64ADDconst: 21 return rewriteValueARM64_OpARM64ADDconst_0(v) 22 case OpARM64ADDshiftLL: 23 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 24 case OpARM64ADDshiftRA: 25 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 26 case OpARM64ADDshiftRL: 27 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 28 case OpARM64AND: 29 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 30 case OpARM64ANDconst: 31 return rewriteValueARM64_OpARM64ANDconst_0(v) 32 case OpARM64ANDshiftLL: 33 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 34 case OpARM64ANDshiftRA: 35 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 36 case OpARM64ANDshiftRL: 37 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 38 case OpARM64BIC: 39 return rewriteValueARM64_OpARM64BIC_0(v) 40 case OpARM64BICconst: 41 return rewriteValueARM64_OpARM64BICconst_0(v) 42 case OpARM64BICshiftLL: 43 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 44 case OpARM64BICshiftRA: 45 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 46 case OpARM64BICshiftRL: 47 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 48 case OpARM64CMP: 49 return rewriteValueARM64_OpARM64CMP_0(v) 50 case OpARM64CMPW: 51 return rewriteValueARM64_OpARM64CMPW_0(v) 52 case OpARM64CMPWconst: 53 return rewriteValueARM64_OpARM64CMPWconst_0(v) 54 case 
OpARM64CMPconst: 55 return rewriteValueARM64_OpARM64CMPconst_0(v) 56 case OpARM64CMPshiftLL: 57 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 58 case OpARM64CMPshiftRA: 59 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 60 case OpARM64CMPshiftRL: 61 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 62 case OpARM64CSEL: 63 return rewriteValueARM64_OpARM64CSEL_0(v) 64 case OpARM64CSEL0: 65 return rewriteValueARM64_OpARM64CSEL0_0(v) 66 case OpARM64DIV: 67 return rewriteValueARM64_OpARM64DIV_0(v) 68 case OpARM64DIVW: 69 return rewriteValueARM64_OpARM64DIVW_0(v) 70 case OpARM64Equal: 71 return rewriteValueARM64_OpARM64Equal_0(v) 72 case OpARM64FADDD: 73 return rewriteValueARM64_OpARM64FADDD_0(v) 74 case OpARM64FADDS: 75 return rewriteValueARM64_OpARM64FADDS_0(v) 76 case OpARM64FMOVDgpfp: 77 return rewriteValueARM64_OpARM64FMOVDgpfp_0(v) 78 case OpARM64FMOVDload: 79 return rewriteValueARM64_OpARM64FMOVDload_0(v) 80 case OpARM64FMOVDstore: 81 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 82 case OpARM64FMOVSload: 83 return rewriteValueARM64_OpARM64FMOVSload_0(v) 84 case OpARM64FMOVSstore: 85 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 86 case OpARM64FMULD: 87 return rewriteValueARM64_OpARM64FMULD_0(v) 88 case OpARM64FMULS: 89 return rewriteValueARM64_OpARM64FMULS_0(v) 90 case OpARM64FNEGD: 91 return rewriteValueARM64_OpARM64FNEGD_0(v) 92 case OpARM64FNEGS: 93 return rewriteValueARM64_OpARM64FNEGS_0(v) 94 case OpARM64FNMULD: 95 return rewriteValueARM64_OpARM64FNMULD_0(v) 96 case OpARM64FNMULS: 97 return rewriteValueARM64_OpARM64FNMULS_0(v) 98 case OpARM64FSUBD: 99 return rewriteValueARM64_OpARM64FSUBD_0(v) 100 case OpARM64FSUBS: 101 return rewriteValueARM64_OpARM64FSUBS_0(v) 102 case OpARM64GreaterEqual: 103 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 104 case OpARM64GreaterEqualU: 105 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 106 case OpARM64GreaterThan: 107 return rewriteValueARM64_OpARM64GreaterThan_0(v) 108 case OpARM64GreaterThanU: 109 return 
rewriteValueARM64_OpARM64GreaterThanU_0(v) 110 case OpARM64LessEqual: 111 return rewriteValueARM64_OpARM64LessEqual_0(v) 112 case OpARM64LessEqualU: 113 return rewriteValueARM64_OpARM64LessEqualU_0(v) 114 case OpARM64LessThan: 115 return rewriteValueARM64_OpARM64LessThan_0(v) 116 case OpARM64LessThanU: 117 return rewriteValueARM64_OpARM64LessThanU_0(v) 118 case OpARM64MNEG: 119 return rewriteValueARM64_OpARM64MNEG_0(v) || rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v) 120 case OpARM64MNEGW: 121 return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v) 122 case OpARM64MOD: 123 return rewriteValueARM64_OpARM64MOD_0(v) 124 case OpARM64MODW: 125 return rewriteValueARM64_OpARM64MODW_0(v) 126 case OpARM64MOVBUload: 127 return rewriteValueARM64_OpARM64MOVBUload_0(v) 128 case OpARM64MOVBUreg: 129 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 130 case OpARM64MOVBload: 131 return rewriteValueARM64_OpARM64MOVBload_0(v) 132 case OpARM64MOVBreg: 133 return rewriteValueARM64_OpARM64MOVBreg_0(v) 134 case OpARM64MOVBstore: 135 return rewriteValueARM64_OpARM64MOVBstore_0(v) 136 case OpARM64MOVBstorezero: 137 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 138 case OpARM64MOVDload: 139 return rewriteValueARM64_OpARM64MOVDload_0(v) 140 case OpARM64MOVDreg: 141 return rewriteValueARM64_OpARM64MOVDreg_0(v) 142 case OpARM64MOVDstore: 143 return rewriteValueARM64_OpARM64MOVDstore_0(v) 144 case OpARM64MOVDstorezero: 145 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 146 case OpARM64MOVHUload: 147 return rewriteValueARM64_OpARM64MOVHUload_0(v) 148 case OpARM64MOVHUreg: 149 return rewriteValueARM64_OpARM64MOVHUreg_0(v) 150 case OpARM64MOVHload: 151 return rewriteValueARM64_OpARM64MOVHload_0(v) 152 case OpARM64MOVHreg: 153 return rewriteValueARM64_OpARM64MOVHreg_0(v) 154 case OpARM64MOVHstore: 155 return rewriteValueARM64_OpARM64MOVHstore_0(v) 156 case OpARM64MOVHstorezero: 157 return 
rewriteValueARM64_OpARM64MOVHstorezero_0(v) 158 case OpARM64MOVQstorezero: 159 return rewriteValueARM64_OpARM64MOVQstorezero_0(v) 160 case OpARM64MOVWUload: 161 return rewriteValueARM64_OpARM64MOVWUload_0(v) 162 case OpARM64MOVWUreg: 163 return rewriteValueARM64_OpARM64MOVWUreg_0(v) 164 case OpARM64MOVWload: 165 return rewriteValueARM64_OpARM64MOVWload_0(v) 166 case OpARM64MOVWreg: 167 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 168 case OpARM64MOVWstore: 169 return rewriteValueARM64_OpARM64MOVWstore_0(v) 170 case OpARM64MOVWstorezero: 171 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 172 case OpARM64MUL: 173 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 174 case OpARM64MULW: 175 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 176 case OpARM64MVN: 177 return rewriteValueARM64_OpARM64MVN_0(v) 178 case OpARM64NEG: 179 return rewriteValueARM64_OpARM64NEG_0(v) 180 case OpARM64NotEqual: 181 return rewriteValueARM64_OpARM64NotEqual_0(v) 182 case OpARM64OR: 183 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) 184 case OpARM64ORconst: 185 return rewriteValueARM64_OpARM64ORconst_0(v) 186 case OpARM64ORshiftLL: 187 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) 188 case OpARM64ORshiftRA: 189 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 190 case OpARM64ORshiftRL: 191 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 192 case OpARM64SLL: 193 return rewriteValueARM64_OpARM64SLL_0(v) 194 case OpARM64SLLconst: 195 return rewriteValueARM64_OpARM64SLLconst_0(v) 196 case OpARM64SRA: 197 return rewriteValueARM64_OpARM64SRA_0(v) 198 case OpARM64SRAconst: 199 return rewriteValueARM64_OpARM64SRAconst_0(v) 200 case OpARM64SRL: 201 return rewriteValueARM64_OpARM64SRL_0(v) 202 case OpARM64SRLconst: 203 return 
rewriteValueARM64_OpARM64SRLconst_0(v) 204 case OpARM64STP: 205 return rewriteValueARM64_OpARM64STP_0(v) 206 case OpARM64SUB: 207 return rewriteValueARM64_OpARM64SUB_0(v) 208 case OpARM64SUBconst: 209 return rewriteValueARM64_OpARM64SUBconst_0(v) 210 case OpARM64SUBshiftLL: 211 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 212 case OpARM64SUBshiftRA: 213 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 214 case OpARM64SUBshiftRL: 215 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 216 case OpARM64UDIV: 217 return rewriteValueARM64_OpARM64UDIV_0(v) 218 case OpARM64UDIVW: 219 return rewriteValueARM64_OpARM64UDIVW_0(v) 220 case OpARM64UMOD: 221 return rewriteValueARM64_OpARM64UMOD_0(v) 222 case OpARM64UMODW: 223 return rewriteValueARM64_OpARM64UMODW_0(v) 224 case OpARM64XOR: 225 return rewriteValueARM64_OpARM64XOR_0(v) 226 case OpARM64XORconst: 227 return rewriteValueARM64_OpARM64XORconst_0(v) 228 case OpARM64XORshiftLL: 229 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 230 case OpARM64XORshiftRA: 231 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 232 case OpARM64XORshiftRL: 233 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 234 case OpAdd16: 235 return rewriteValueARM64_OpAdd16_0(v) 236 case OpAdd32: 237 return rewriteValueARM64_OpAdd32_0(v) 238 case OpAdd32F: 239 return rewriteValueARM64_OpAdd32F_0(v) 240 case OpAdd64: 241 return rewriteValueARM64_OpAdd64_0(v) 242 case OpAdd64F: 243 return rewriteValueARM64_OpAdd64F_0(v) 244 case OpAdd8: 245 return rewriteValueARM64_OpAdd8_0(v) 246 case OpAddPtr: 247 return rewriteValueARM64_OpAddPtr_0(v) 248 case OpAddr: 249 return rewriteValueARM64_OpAddr_0(v) 250 case OpAnd16: 251 return rewriteValueARM64_OpAnd16_0(v) 252 case OpAnd32: 253 return rewriteValueARM64_OpAnd32_0(v) 254 case OpAnd64: 255 return rewriteValueARM64_OpAnd64_0(v) 256 case OpAnd8: 257 return rewriteValueARM64_OpAnd8_0(v) 258 case OpAndB: 259 return rewriteValueARM64_OpAndB_0(v) 260 case OpAtomicAdd32: 261 return rewriteValueARM64_OpAtomicAdd32_0(v) 262 
case OpAtomicAdd64: 263 return rewriteValueARM64_OpAtomicAdd64_0(v) 264 case OpAtomicAnd8: 265 return rewriteValueARM64_OpAtomicAnd8_0(v) 266 case OpAtomicCompareAndSwap32: 267 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 268 case OpAtomicCompareAndSwap64: 269 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 270 case OpAtomicExchange32: 271 return rewriteValueARM64_OpAtomicExchange32_0(v) 272 case OpAtomicExchange64: 273 return rewriteValueARM64_OpAtomicExchange64_0(v) 274 case OpAtomicLoad32: 275 return rewriteValueARM64_OpAtomicLoad32_0(v) 276 case OpAtomicLoad64: 277 return rewriteValueARM64_OpAtomicLoad64_0(v) 278 case OpAtomicLoadPtr: 279 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 280 case OpAtomicOr8: 281 return rewriteValueARM64_OpAtomicOr8_0(v) 282 case OpAtomicStore32: 283 return rewriteValueARM64_OpAtomicStore32_0(v) 284 case OpAtomicStore64: 285 return rewriteValueARM64_OpAtomicStore64_0(v) 286 case OpAtomicStorePtrNoWB: 287 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 288 case OpAvg64u: 289 return rewriteValueARM64_OpAvg64u_0(v) 290 case OpBitLen64: 291 return rewriteValueARM64_OpBitLen64_0(v) 292 case OpBitRev16: 293 return rewriteValueARM64_OpBitRev16_0(v) 294 case OpBitRev32: 295 return rewriteValueARM64_OpBitRev32_0(v) 296 case OpBitRev64: 297 return rewriteValueARM64_OpBitRev64_0(v) 298 case OpBitRev8: 299 return rewriteValueARM64_OpBitRev8_0(v) 300 case OpBswap32: 301 return rewriteValueARM64_OpBswap32_0(v) 302 case OpBswap64: 303 return rewriteValueARM64_OpBswap64_0(v) 304 case OpCeil: 305 return rewriteValueARM64_OpCeil_0(v) 306 case OpClosureCall: 307 return rewriteValueARM64_OpClosureCall_0(v) 308 case OpCom16: 309 return rewriteValueARM64_OpCom16_0(v) 310 case OpCom32: 311 return rewriteValueARM64_OpCom32_0(v) 312 case OpCom64: 313 return rewriteValueARM64_OpCom64_0(v) 314 case OpCom8: 315 return rewriteValueARM64_OpCom8_0(v) 316 case OpCondSelect: 317 return rewriteValueARM64_OpCondSelect_0(v) 318 case OpConst16: 319 
return rewriteValueARM64_OpConst16_0(v) 320 case OpConst32: 321 return rewriteValueARM64_OpConst32_0(v) 322 case OpConst32F: 323 return rewriteValueARM64_OpConst32F_0(v) 324 case OpConst64: 325 return rewriteValueARM64_OpConst64_0(v) 326 case OpConst64F: 327 return rewriteValueARM64_OpConst64F_0(v) 328 case OpConst8: 329 return rewriteValueARM64_OpConst8_0(v) 330 case OpConstBool: 331 return rewriteValueARM64_OpConstBool_0(v) 332 case OpConstNil: 333 return rewriteValueARM64_OpConstNil_0(v) 334 case OpConvert: 335 return rewriteValueARM64_OpConvert_0(v) 336 case OpCtz32: 337 return rewriteValueARM64_OpCtz32_0(v) 338 case OpCtz64: 339 return rewriteValueARM64_OpCtz64_0(v) 340 case OpCvt32Fto32: 341 return rewriteValueARM64_OpCvt32Fto32_0(v) 342 case OpCvt32Fto32U: 343 return rewriteValueARM64_OpCvt32Fto32U_0(v) 344 case OpCvt32Fto64: 345 return rewriteValueARM64_OpCvt32Fto64_0(v) 346 case OpCvt32Fto64F: 347 return rewriteValueARM64_OpCvt32Fto64F_0(v) 348 case OpCvt32Fto64U: 349 return rewriteValueARM64_OpCvt32Fto64U_0(v) 350 case OpCvt32Uto32F: 351 return rewriteValueARM64_OpCvt32Uto32F_0(v) 352 case OpCvt32Uto64F: 353 return rewriteValueARM64_OpCvt32Uto64F_0(v) 354 case OpCvt32to32F: 355 return rewriteValueARM64_OpCvt32to32F_0(v) 356 case OpCvt32to64F: 357 return rewriteValueARM64_OpCvt32to64F_0(v) 358 case OpCvt64Fto32: 359 return rewriteValueARM64_OpCvt64Fto32_0(v) 360 case OpCvt64Fto32F: 361 return rewriteValueARM64_OpCvt64Fto32F_0(v) 362 case OpCvt64Fto32U: 363 return rewriteValueARM64_OpCvt64Fto32U_0(v) 364 case OpCvt64Fto64: 365 return rewriteValueARM64_OpCvt64Fto64_0(v) 366 case OpCvt64Fto64U: 367 return rewriteValueARM64_OpCvt64Fto64U_0(v) 368 case OpCvt64Uto32F: 369 return rewriteValueARM64_OpCvt64Uto32F_0(v) 370 case OpCvt64Uto64F: 371 return rewriteValueARM64_OpCvt64Uto64F_0(v) 372 case OpCvt64to32F: 373 return rewriteValueARM64_OpCvt64to32F_0(v) 374 case OpCvt64to64F: 375 return rewriteValueARM64_OpCvt64to64F_0(v) 376 case OpDiv16: 377 return 
rewriteValueARM64_OpDiv16_0(v) 378 case OpDiv16u: 379 return rewriteValueARM64_OpDiv16u_0(v) 380 case OpDiv32: 381 return rewriteValueARM64_OpDiv32_0(v) 382 case OpDiv32F: 383 return rewriteValueARM64_OpDiv32F_0(v) 384 case OpDiv32u: 385 return rewriteValueARM64_OpDiv32u_0(v) 386 case OpDiv64: 387 return rewriteValueARM64_OpDiv64_0(v) 388 case OpDiv64F: 389 return rewriteValueARM64_OpDiv64F_0(v) 390 case OpDiv64u: 391 return rewriteValueARM64_OpDiv64u_0(v) 392 case OpDiv8: 393 return rewriteValueARM64_OpDiv8_0(v) 394 case OpDiv8u: 395 return rewriteValueARM64_OpDiv8u_0(v) 396 case OpEq16: 397 return rewriteValueARM64_OpEq16_0(v) 398 case OpEq32: 399 return rewriteValueARM64_OpEq32_0(v) 400 case OpEq32F: 401 return rewriteValueARM64_OpEq32F_0(v) 402 case OpEq64: 403 return rewriteValueARM64_OpEq64_0(v) 404 case OpEq64F: 405 return rewriteValueARM64_OpEq64F_0(v) 406 case OpEq8: 407 return rewriteValueARM64_OpEq8_0(v) 408 case OpEqB: 409 return rewriteValueARM64_OpEqB_0(v) 410 case OpEqPtr: 411 return rewriteValueARM64_OpEqPtr_0(v) 412 case OpFloor: 413 return rewriteValueARM64_OpFloor_0(v) 414 case OpGeq16: 415 return rewriteValueARM64_OpGeq16_0(v) 416 case OpGeq16U: 417 return rewriteValueARM64_OpGeq16U_0(v) 418 case OpGeq32: 419 return rewriteValueARM64_OpGeq32_0(v) 420 case OpGeq32F: 421 return rewriteValueARM64_OpGeq32F_0(v) 422 case OpGeq32U: 423 return rewriteValueARM64_OpGeq32U_0(v) 424 case OpGeq64: 425 return rewriteValueARM64_OpGeq64_0(v) 426 case OpGeq64F: 427 return rewriteValueARM64_OpGeq64F_0(v) 428 case OpGeq64U: 429 return rewriteValueARM64_OpGeq64U_0(v) 430 case OpGeq8: 431 return rewriteValueARM64_OpGeq8_0(v) 432 case OpGeq8U: 433 return rewriteValueARM64_OpGeq8U_0(v) 434 case OpGetCallerSP: 435 return rewriteValueARM64_OpGetCallerSP_0(v) 436 case OpGetClosurePtr: 437 return rewriteValueARM64_OpGetClosurePtr_0(v) 438 case OpGreater16: 439 return rewriteValueARM64_OpGreater16_0(v) 440 case OpGreater16U: 441 return rewriteValueARM64_OpGreater16U_0(v) 
442 case OpGreater32: 443 return rewriteValueARM64_OpGreater32_0(v) 444 case OpGreater32F: 445 return rewriteValueARM64_OpGreater32F_0(v) 446 case OpGreater32U: 447 return rewriteValueARM64_OpGreater32U_0(v) 448 case OpGreater64: 449 return rewriteValueARM64_OpGreater64_0(v) 450 case OpGreater64F: 451 return rewriteValueARM64_OpGreater64F_0(v) 452 case OpGreater64U: 453 return rewriteValueARM64_OpGreater64U_0(v) 454 case OpGreater8: 455 return rewriteValueARM64_OpGreater8_0(v) 456 case OpGreater8U: 457 return rewriteValueARM64_OpGreater8U_0(v) 458 case OpHmul32: 459 return rewriteValueARM64_OpHmul32_0(v) 460 case OpHmul32u: 461 return rewriteValueARM64_OpHmul32u_0(v) 462 case OpHmul64: 463 return rewriteValueARM64_OpHmul64_0(v) 464 case OpHmul64u: 465 return rewriteValueARM64_OpHmul64u_0(v) 466 case OpInterCall: 467 return rewriteValueARM64_OpInterCall_0(v) 468 case OpIsInBounds: 469 return rewriteValueARM64_OpIsInBounds_0(v) 470 case OpIsNonNil: 471 return rewriteValueARM64_OpIsNonNil_0(v) 472 case OpIsSliceInBounds: 473 return rewriteValueARM64_OpIsSliceInBounds_0(v) 474 case OpLeq16: 475 return rewriteValueARM64_OpLeq16_0(v) 476 case OpLeq16U: 477 return rewriteValueARM64_OpLeq16U_0(v) 478 case OpLeq32: 479 return rewriteValueARM64_OpLeq32_0(v) 480 case OpLeq32F: 481 return rewriteValueARM64_OpLeq32F_0(v) 482 case OpLeq32U: 483 return rewriteValueARM64_OpLeq32U_0(v) 484 case OpLeq64: 485 return rewriteValueARM64_OpLeq64_0(v) 486 case OpLeq64F: 487 return rewriteValueARM64_OpLeq64F_0(v) 488 case OpLeq64U: 489 return rewriteValueARM64_OpLeq64U_0(v) 490 case OpLeq8: 491 return rewriteValueARM64_OpLeq8_0(v) 492 case OpLeq8U: 493 return rewriteValueARM64_OpLeq8U_0(v) 494 case OpLess16: 495 return rewriteValueARM64_OpLess16_0(v) 496 case OpLess16U: 497 return rewriteValueARM64_OpLess16U_0(v) 498 case OpLess32: 499 return rewriteValueARM64_OpLess32_0(v) 500 case OpLess32F: 501 return rewriteValueARM64_OpLess32F_0(v) 502 case OpLess32U: 503 return 
rewriteValueARM64_OpLess32U_0(v) 504 case OpLess64: 505 return rewriteValueARM64_OpLess64_0(v) 506 case OpLess64F: 507 return rewriteValueARM64_OpLess64F_0(v) 508 case OpLess64U: 509 return rewriteValueARM64_OpLess64U_0(v) 510 case OpLess8: 511 return rewriteValueARM64_OpLess8_0(v) 512 case OpLess8U: 513 return rewriteValueARM64_OpLess8U_0(v) 514 case OpLoad: 515 return rewriteValueARM64_OpLoad_0(v) 516 case OpLsh16x16: 517 return rewriteValueARM64_OpLsh16x16_0(v) 518 case OpLsh16x32: 519 return rewriteValueARM64_OpLsh16x32_0(v) 520 case OpLsh16x64: 521 return rewriteValueARM64_OpLsh16x64_0(v) 522 case OpLsh16x8: 523 return rewriteValueARM64_OpLsh16x8_0(v) 524 case OpLsh32x16: 525 return rewriteValueARM64_OpLsh32x16_0(v) 526 case OpLsh32x32: 527 return rewriteValueARM64_OpLsh32x32_0(v) 528 case OpLsh32x64: 529 return rewriteValueARM64_OpLsh32x64_0(v) 530 case OpLsh32x8: 531 return rewriteValueARM64_OpLsh32x8_0(v) 532 case OpLsh64x16: 533 return rewriteValueARM64_OpLsh64x16_0(v) 534 case OpLsh64x32: 535 return rewriteValueARM64_OpLsh64x32_0(v) 536 case OpLsh64x64: 537 return rewriteValueARM64_OpLsh64x64_0(v) 538 case OpLsh64x8: 539 return rewriteValueARM64_OpLsh64x8_0(v) 540 case OpLsh8x16: 541 return rewriteValueARM64_OpLsh8x16_0(v) 542 case OpLsh8x32: 543 return rewriteValueARM64_OpLsh8x32_0(v) 544 case OpLsh8x64: 545 return rewriteValueARM64_OpLsh8x64_0(v) 546 case OpLsh8x8: 547 return rewriteValueARM64_OpLsh8x8_0(v) 548 case OpMod16: 549 return rewriteValueARM64_OpMod16_0(v) 550 case OpMod16u: 551 return rewriteValueARM64_OpMod16u_0(v) 552 case OpMod32: 553 return rewriteValueARM64_OpMod32_0(v) 554 case OpMod32u: 555 return rewriteValueARM64_OpMod32u_0(v) 556 case OpMod64: 557 return rewriteValueARM64_OpMod64_0(v) 558 case OpMod64u: 559 return rewriteValueARM64_OpMod64u_0(v) 560 case OpMod8: 561 return rewriteValueARM64_OpMod8_0(v) 562 case OpMod8u: 563 return rewriteValueARM64_OpMod8u_0(v) 564 case OpMove: 565 return rewriteValueARM64_OpMove_0(v) || 
rewriteValueARM64_OpMove_10(v) 566 case OpMul16: 567 return rewriteValueARM64_OpMul16_0(v) 568 case OpMul32: 569 return rewriteValueARM64_OpMul32_0(v) 570 case OpMul32F: 571 return rewriteValueARM64_OpMul32F_0(v) 572 case OpMul64: 573 return rewriteValueARM64_OpMul64_0(v) 574 case OpMul64F: 575 return rewriteValueARM64_OpMul64F_0(v) 576 case OpMul8: 577 return rewriteValueARM64_OpMul8_0(v) 578 case OpNeg16: 579 return rewriteValueARM64_OpNeg16_0(v) 580 case OpNeg32: 581 return rewriteValueARM64_OpNeg32_0(v) 582 case OpNeg32F: 583 return rewriteValueARM64_OpNeg32F_0(v) 584 case OpNeg64: 585 return rewriteValueARM64_OpNeg64_0(v) 586 case OpNeg64F: 587 return rewriteValueARM64_OpNeg64F_0(v) 588 case OpNeg8: 589 return rewriteValueARM64_OpNeg8_0(v) 590 case OpNeq16: 591 return rewriteValueARM64_OpNeq16_0(v) 592 case OpNeq32: 593 return rewriteValueARM64_OpNeq32_0(v) 594 case OpNeq32F: 595 return rewriteValueARM64_OpNeq32F_0(v) 596 case OpNeq64: 597 return rewriteValueARM64_OpNeq64_0(v) 598 case OpNeq64F: 599 return rewriteValueARM64_OpNeq64F_0(v) 600 case OpNeq8: 601 return rewriteValueARM64_OpNeq8_0(v) 602 case OpNeqB: 603 return rewriteValueARM64_OpNeqB_0(v) 604 case OpNeqPtr: 605 return rewriteValueARM64_OpNeqPtr_0(v) 606 case OpNilCheck: 607 return rewriteValueARM64_OpNilCheck_0(v) 608 case OpNot: 609 return rewriteValueARM64_OpNot_0(v) 610 case OpOffPtr: 611 return rewriteValueARM64_OpOffPtr_0(v) 612 case OpOr16: 613 return rewriteValueARM64_OpOr16_0(v) 614 case OpOr32: 615 return rewriteValueARM64_OpOr32_0(v) 616 case OpOr64: 617 return rewriteValueARM64_OpOr64_0(v) 618 case OpOr8: 619 return rewriteValueARM64_OpOr8_0(v) 620 case OpOrB: 621 return rewriteValueARM64_OpOrB_0(v) 622 case OpPopCount16: 623 return rewriteValueARM64_OpPopCount16_0(v) 624 case OpPopCount32: 625 return rewriteValueARM64_OpPopCount32_0(v) 626 case OpPopCount64: 627 return rewriteValueARM64_OpPopCount64_0(v) 628 case OpRound: 629 return rewriteValueARM64_OpRound_0(v) 630 case OpRound32F: 
631 return rewriteValueARM64_OpRound32F_0(v) 632 case OpRound64F: 633 return rewriteValueARM64_OpRound64F_0(v) 634 case OpRsh16Ux16: 635 return rewriteValueARM64_OpRsh16Ux16_0(v) 636 case OpRsh16Ux32: 637 return rewriteValueARM64_OpRsh16Ux32_0(v) 638 case OpRsh16Ux64: 639 return rewriteValueARM64_OpRsh16Ux64_0(v) 640 case OpRsh16Ux8: 641 return rewriteValueARM64_OpRsh16Ux8_0(v) 642 case OpRsh16x16: 643 return rewriteValueARM64_OpRsh16x16_0(v) 644 case OpRsh16x32: 645 return rewriteValueARM64_OpRsh16x32_0(v) 646 case OpRsh16x64: 647 return rewriteValueARM64_OpRsh16x64_0(v) 648 case OpRsh16x8: 649 return rewriteValueARM64_OpRsh16x8_0(v) 650 case OpRsh32Ux16: 651 return rewriteValueARM64_OpRsh32Ux16_0(v) 652 case OpRsh32Ux32: 653 return rewriteValueARM64_OpRsh32Ux32_0(v) 654 case OpRsh32Ux64: 655 return rewriteValueARM64_OpRsh32Ux64_0(v) 656 case OpRsh32Ux8: 657 return rewriteValueARM64_OpRsh32Ux8_0(v) 658 case OpRsh32x16: 659 return rewriteValueARM64_OpRsh32x16_0(v) 660 case OpRsh32x32: 661 return rewriteValueARM64_OpRsh32x32_0(v) 662 case OpRsh32x64: 663 return rewriteValueARM64_OpRsh32x64_0(v) 664 case OpRsh32x8: 665 return rewriteValueARM64_OpRsh32x8_0(v) 666 case OpRsh64Ux16: 667 return rewriteValueARM64_OpRsh64Ux16_0(v) 668 case OpRsh64Ux32: 669 return rewriteValueARM64_OpRsh64Ux32_0(v) 670 case OpRsh64Ux64: 671 return rewriteValueARM64_OpRsh64Ux64_0(v) 672 case OpRsh64Ux8: 673 return rewriteValueARM64_OpRsh64Ux8_0(v) 674 case OpRsh64x16: 675 return rewriteValueARM64_OpRsh64x16_0(v) 676 case OpRsh64x32: 677 return rewriteValueARM64_OpRsh64x32_0(v) 678 case OpRsh64x64: 679 return rewriteValueARM64_OpRsh64x64_0(v) 680 case OpRsh64x8: 681 return rewriteValueARM64_OpRsh64x8_0(v) 682 case OpRsh8Ux16: 683 return rewriteValueARM64_OpRsh8Ux16_0(v) 684 case OpRsh8Ux32: 685 return rewriteValueARM64_OpRsh8Ux32_0(v) 686 case OpRsh8Ux64: 687 return rewriteValueARM64_OpRsh8Ux64_0(v) 688 case OpRsh8Ux8: 689 return rewriteValueARM64_OpRsh8Ux8_0(v) 690 case OpRsh8x16: 691 return 
rewriteValueARM64_OpRsh8x16_0(v) 692 case OpRsh8x32: 693 return rewriteValueARM64_OpRsh8x32_0(v) 694 case OpRsh8x64: 695 return rewriteValueARM64_OpRsh8x64_0(v) 696 case OpRsh8x8: 697 return rewriteValueARM64_OpRsh8x8_0(v) 698 case OpSignExt16to32: 699 return rewriteValueARM64_OpSignExt16to32_0(v) 700 case OpSignExt16to64: 701 return rewriteValueARM64_OpSignExt16to64_0(v) 702 case OpSignExt32to64: 703 return rewriteValueARM64_OpSignExt32to64_0(v) 704 case OpSignExt8to16: 705 return rewriteValueARM64_OpSignExt8to16_0(v) 706 case OpSignExt8to32: 707 return rewriteValueARM64_OpSignExt8to32_0(v) 708 case OpSignExt8to64: 709 return rewriteValueARM64_OpSignExt8to64_0(v) 710 case OpSlicemask: 711 return rewriteValueARM64_OpSlicemask_0(v) 712 case OpSqrt: 713 return rewriteValueARM64_OpSqrt_0(v) 714 case OpStaticCall: 715 return rewriteValueARM64_OpStaticCall_0(v) 716 case OpStore: 717 return rewriteValueARM64_OpStore_0(v) 718 case OpSub16: 719 return rewriteValueARM64_OpSub16_0(v) 720 case OpSub32: 721 return rewriteValueARM64_OpSub32_0(v) 722 case OpSub32F: 723 return rewriteValueARM64_OpSub32F_0(v) 724 case OpSub64: 725 return rewriteValueARM64_OpSub64_0(v) 726 case OpSub64F: 727 return rewriteValueARM64_OpSub64F_0(v) 728 case OpSub8: 729 return rewriteValueARM64_OpSub8_0(v) 730 case OpSubPtr: 731 return rewriteValueARM64_OpSubPtr_0(v) 732 case OpTrunc: 733 return rewriteValueARM64_OpTrunc_0(v) 734 case OpTrunc16to8: 735 return rewriteValueARM64_OpTrunc16to8_0(v) 736 case OpTrunc32to16: 737 return rewriteValueARM64_OpTrunc32to16_0(v) 738 case OpTrunc32to8: 739 return rewriteValueARM64_OpTrunc32to8_0(v) 740 case OpTrunc64to16: 741 return rewriteValueARM64_OpTrunc64to16_0(v) 742 case OpTrunc64to32: 743 return rewriteValueARM64_OpTrunc64to32_0(v) 744 case OpTrunc64to8: 745 return rewriteValueARM64_OpTrunc64to8_0(v) 746 case OpWB: 747 return rewriteValueARM64_OpWB_0(v) 748 case OpXor16: 749 return rewriteValueARM64_OpXor16_0(v) 750 case OpXor32: 751 return 
rewriteValueARM64_OpXor32_0(v) 752 case OpXor64: 753 return rewriteValueARM64_OpXor64_0(v) 754 case OpXor8: 755 return rewriteValueARM64_OpXor8_0(v) 756 case OpZero: 757 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 758 case OpZeroExt16to32: 759 return rewriteValueARM64_OpZeroExt16to32_0(v) 760 case OpZeroExt16to64: 761 return rewriteValueARM64_OpZeroExt16to64_0(v) 762 case OpZeroExt32to64: 763 return rewriteValueARM64_OpZeroExt32to64_0(v) 764 case OpZeroExt8to16: 765 return rewriteValueARM64_OpZeroExt8to16_0(v) 766 case OpZeroExt8to32: 767 return rewriteValueARM64_OpZeroExt8to32_0(v) 768 case OpZeroExt8to64: 769 return rewriteValueARM64_OpZeroExt8to64_0(v) 770 } 771 return false 772 } 773 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 774 // match: (ADD x (MOVDconst [c])) 775 // cond: 776 // result: (ADDconst [c] x) 777 for { 778 _ = v.Args[1] 779 x := v.Args[0] 780 v_1 := v.Args[1] 781 if v_1.Op != OpARM64MOVDconst { 782 break 783 } 784 c := v_1.AuxInt 785 v.reset(OpARM64ADDconst) 786 v.AuxInt = c 787 v.AddArg(x) 788 return true 789 } 790 // match: (ADD (MOVDconst [c]) x) 791 // cond: 792 // result: (ADDconst [c] x) 793 for { 794 _ = v.Args[1] 795 v_0 := v.Args[0] 796 if v_0.Op != OpARM64MOVDconst { 797 break 798 } 799 c := v_0.AuxInt 800 x := v.Args[1] 801 v.reset(OpARM64ADDconst) 802 v.AuxInt = c 803 v.AddArg(x) 804 return true 805 } 806 // match: (ADD x (NEG y)) 807 // cond: 808 // result: (SUB x y) 809 for { 810 _ = v.Args[1] 811 x := v.Args[0] 812 v_1 := v.Args[1] 813 if v_1.Op != OpARM64NEG { 814 break 815 } 816 y := v_1.Args[0] 817 v.reset(OpARM64SUB) 818 v.AddArg(x) 819 v.AddArg(y) 820 return true 821 } 822 // match: (ADD (NEG y) x) 823 // cond: 824 // result: (SUB x y) 825 for { 826 _ = v.Args[1] 827 v_0 := v.Args[0] 828 if v_0.Op != OpARM64NEG { 829 break 830 } 831 y := v_0.Args[0] 832 x := v.Args[1] 833 v.reset(OpARM64SUB) 834 v.AddArg(x) 835 v.AddArg(y) 836 return true 837 } 838 // 
match: (ADD x0 x1:(SLLconst [c] y)) 839 // cond: clobberIfDead(x1) 840 // result: (ADDshiftLL x0 y [c]) 841 for { 842 _ = v.Args[1] 843 x0 := v.Args[0] 844 x1 := v.Args[1] 845 if x1.Op != OpARM64SLLconst { 846 break 847 } 848 c := x1.AuxInt 849 y := x1.Args[0] 850 if !(clobberIfDead(x1)) { 851 break 852 } 853 v.reset(OpARM64ADDshiftLL) 854 v.AuxInt = c 855 v.AddArg(x0) 856 v.AddArg(y) 857 return true 858 } 859 // match: (ADD x1:(SLLconst [c] y) x0) 860 // cond: clobberIfDead(x1) 861 // result: (ADDshiftLL x0 y [c]) 862 for { 863 _ = v.Args[1] 864 x1 := v.Args[0] 865 if x1.Op != OpARM64SLLconst { 866 break 867 } 868 c := x1.AuxInt 869 y := x1.Args[0] 870 x0 := v.Args[1] 871 if !(clobberIfDead(x1)) { 872 break 873 } 874 v.reset(OpARM64ADDshiftLL) 875 v.AuxInt = c 876 v.AddArg(x0) 877 v.AddArg(y) 878 return true 879 } 880 // match: (ADD x0 x1:(SRLconst [c] y)) 881 // cond: clobberIfDead(x1) 882 // result: (ADDshiftRL x0 y [c]) 883 for { 884 _ = v.Args[1] 885 x0 := v.Args[0] 886 x1 := v.Args[1] 887 if x1.Op != OpARM64SRLconst { 888 break 889 } 890 c := x1.AuxInt 891 y := x1.Args[0] 892 if !(clobberIfDead(x1)) { 893 break 894 } 895 v.reset(OpARM64ADDshiftRL) 896 v.AuxInt = c 897 v.AddArg(x0) 898 v.AddArg(y) 899 return true 900 } 901 // match: (ADD x1:(SRLconst [c] y) x0) 902 // cond: clobberIfDead(x1) 903 // result: (ADDshiftRL x0 y [c]) 904 for { 905 _ = v.Args[1] 906 x1 := v.Args[0] 907 if x1.Op != OpARM64SRLconst { 908 break 909 } 910 c := x1.AuxInt 911 y := x1.Args[0] 912 x0 := v.Args[1] 913 if !(clobberIfDead(x1)) { 914 break 915 } 916 v.reset(OpARM64ADDshiftRL) 917 v.AuxInt = c 918 v.AddArg(x0) 919 v.AddArg(y) 920 return true 921 } 922 // match: (ADD x0 x1:(SRAconst [c] y)) 923 // cond: clobberIfDead(x1) 924 // result: (ADDshiftRA x0 y [c]) 925 for { 926 _ = v.Args[1] 927 x0 := v.Args[0] 928 x1 := v.Args[1] 929 if x1.Op != OpARM64SRAconst { 930 break 931 } 932 c := x1.AuxInt 933 y := x1.Args[0] 934 if !(clobberIfDead(x1)) { 935 break 936 } 937 
v.reset(OpARM64ADDshiftRA) 938 v.AuxInt = c 939 v.AddArg(x0) 940 v.AddArg(y) 941 return true 942 } 943 // match: (ADD x1:(SRAconst [c] y) x0) 944 // cond: clobberIfDead(x1) 945 // result: (ADDshiftRA x0 y [c]) 946 for { 947 _ = v.Args[1] 948 x1 := v.Args[0] 949 if x1.Op != OpARM64SRAconst { 950 break 951 } 952 c := x1.AuxInt 953 y := x1.Args[0] 954 x0 := v.Args[1] 955 if !(clobberIfDead(x1)) { 956 break 957 } 958 v.reset(OpARM64ADDshiftRA) 959 v.AuxInt = c 960 v.AddArg(x0) 961 v.AddArg(y) 962 return true 963 } 964 return false 965 } 966 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 967 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 968 // cond: 969 // result: (MOVDaddr [off1+off2] {sym} ptr) 970 for { 971 off1 := v.AuxInt 972 v_0 := v.Args[0] 973 if v_0.Op != OpARM64MOVDaddr { 974 break 975 } 976 off2 := v_0.AuxInt 977 sym := v_0.Aux 978 ptr := v_0.Args[0] 979 v.reset(OpARM64MOVDaddr) 980 v.AuxInt = off1 + off2 981 v.Aux = sym 982 v.AddArg(ptr) 983 return true 984 } 985 // match: (ADDconst [0] x) 986 // cond: 987 // result: x 988 for { 989 if v.AuxInt != 0 { 990 break 991 } 992 x := v.Args[0] 993 v.reset(OpCopy) 994 v.Type = x.Type 995 v.AddArg(x) 996 return true 997 } 998 // match: (ADDconst [c] (MOVDconst [d])) 999 // cond: 1000 // result: (MOVDconst [c+d]) 1001 for { 1002 c := v.AuxInt 1003 v_0 := v.Args[0] 1004 if v_0.Op != OpARM64MOVDconst { 1005 break 1006 } 1007 d := v_0.AuxInt 1008 v.reset(OpARM64MOVDconst) 1009 v.AuxInt = c + d 1010 return true 1011 } 1012 // match: (ADDconst [c] (ADDconst [d] x)) 1013 // cond: 1014 // result: (ADDconst [c+d] x) 1015 for { 1016 c := v.AuxInt 1017 v_0 := v.Args[0] 1018 if v_0.Op != OpARM64ADDconst { 1019 break 1020 } 1021 d := v_0.AuxInt 1022 x := v_0.Args[0] 1023 v.reset(OpARM64ADDconst) 1024 v.AuxInt = c + d 1025 v.AddArg(x) 1026 return true 1027 } 1028 // match: (ADDconst [c] (SUBconst [d] x)) 1029 // cond: 1030 // result: (ADDconst [c-d] x) 1031 for { 1032 c := v.AuxInt 1033 v_0 := v.Args[0] 1034 if 
v_0.Op != OpARM64SUBconst { 1035 break 1036 } 1037 d := v_0.AuxInt 1038 x := v_0.Args[0] 1039 v.reset(OpARM64ADDconst) 1040 v.AuxInt = c - d 1041 v.AddArg(x) 1042 return true 1043 } 1044 return false 1045 } 1046 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 1047 b := v.Block 1048 _ = b 1049 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 1050 // cond: 1051 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 1052 for { 1053 d := v.AuxInt 1054 _ = v.Args[1] 1055 v_0 := v.Args[0] 1056 if v_0.Op != OpARM64MOVDconst { 1057 break 1058 } 1059 c := v_0.AuxInt 1060 x := v.Args[1] 1061 v.reset(OpARM64ADDconst) 1062 v.AuxInt = c 1063 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1064 v0.AuxInt = d 1065 v0.AddArg(x) 1066 v.AddArg(v0) 1067 return true 1068 } 1069 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 1070 // cond: 1071 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 1072 for { 1073 d := v.AuxInt 1074 _ = v.Args[1] 1075 x := v.Args[0] 1076 v_1 := v.Args[1] 1077 if v_1.Op != OpARM64MOVDconst { 1078 break 1079 } 1080 c := v_1.AuxInt 1081 v.reset(OpARM64ADDconst) 1082 v.AuxInt = int64(uint64(c) << uint64(d)) 1083 v.AddArg(x) 1084 return true 1085 } 1086 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 1087 // cond: 1088 // result: (RORconst [64-c] x) 1089 for { 1090 c := v.AuxInt 1091 _ = v.Args[1] 1092 v_0 := v.Args[0] 1093 if v_0.Op != OpARM64SRLconst { 1094 break 1095 } 1096 if v_0.AuxInt != 64-c { 1097 break 1098 } 1099 x := v_0.Args[0] 1100 if x != v.Args[1] { 1101 break 1102 } 1103 v.reset(OpARM64RORconst) 1104 v.AuxInt = 64 - c 1105 v.AddArg(x) 1106 return true 1107 } 1108 // match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 1109 // cond: c < 32 && t.Size() == 4 1110 // result: (RORWconst [32-c] x) 1111 for { 1112 t := v.Type 1113 c := v.AuxInt 1114 _ = v.Args[1] 1115 v_0 := v.Args[0] 1116 if v_0.Op != OpARM64SRLconst { 1117 break 1118 } 1119 if v_0.AuxInt != 32-c { 1120 break 1121 } 1122 v_0_0 := v_0.Args[0] 1123 if v_0_0.Op != 
OpARM64MOVWUreg { 1124 break 1125 } 1126 x := v_0_0.Args[0] 1127 if x != v.Args[1] { 1128 break 1129 } 1130 if !(c < 32 && t.Size() == 4) { 1131 break 1132 } 1133 v.reset(OpARM64RORWconst) 1134 v.AuxInt = 32 - c 1135 v.AddArg(x) 1136 return true 1137 } 1138 return false 1139 } 1140 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 1141 b := v.Block 1142 _ = b 1143 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1144 // cond: 1145 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1146 for { 1147 d := v.AuxInt 1148 _ = v.Args[1] 1149 v_0 := v.Args[0] 1150 if v_0.Op != OpARM64MOVDconst { 1151 break 1152 } 1153 c := v_0.AuxInt 1154 x := v.Args[1] 1155 v.reset(OpARM64ADDconst) 1156 v.AuxInt = c 1157 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1158 v0.AuxInt = d 1159 v0.AddArg(x) 1160 v.AddArg(v0) 1161 return true 1162 } 1163 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1164 // cond: 1165 // result: (ADDconst x [int64(int64(c)>>uint64(d))]) 1166 for { 1167 d := v.AuxInt 1168 _ = v.Args[1] 1169 x := v.Args[0] 1170 v_1 := v.Args[1] 1171 if v_1.Op != OpARM64MOVDconst { 1172 break 1173 } 1174 c := v_1.AuxInt 1175 v.reset(OpARM64ADDconst) 1176 v.AuxInt = int64(int64(c) >> uint64(d)) 1177 v.AddArg(x) 1178 return true 1179 } 1180 return false 1181 } 1182 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 1183 b := v.Block 1184 _ = b 1185 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1186 // cond: 1187 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1188 for { 1189 d := v.AuxInt 1190 _ = v.Args[1] 1191 v_0 := v.Args[0] 1192 if v_0.Op != OpARM64MOVDconst { 1193 break 1194 } 1195 c := v_0.AuxInt 1196 x := v.Args[1] 1197 v.reset(OpARM64ADDconst) 1198 v.AuxInt = c 1199 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1200 v0.AuxInt = d 1201 v0.AddArg(x) 1202 v.AddArg(v0) 1203 return true 1204 } 1205 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1206 // cond: 1207 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1208 for { 1209 d := v.AuxInt 1210 _ = 
v.Args[1] 1211 x := v.Args[0] 1212 v_1 := v.Args[1] 1213 if v_1.Op != OpARM64MOVDconst { 1214 break 1215 } 1216 c := v_1.AuxInt 1217 v.reset(OpARM64ADDconst) 1218 v.AuxInt = int64(uint64(c) >> uint64(d)) 1219 v.AddArg(x) 1220 return true 1221 } 1222 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 1223 // cond: 1224 // result: (RORconst [ c] x) 1225 for { 1226 c := v.AuxInt 1227 _ = v.Args[1] 1228 v_0 := v.Args[0] 1229 if v_0.Op != OpARM64SLLconst { 1230 break 1231 } 1232 if v_0.AuxInt != 64-c { 1233 break 1234 } 1235 x := v_0.Args[0] 1236 if x != v.Args[1] { 1237 break 1238 } 1239 v.reset(OpARM64RORconst) 1240 v.AuxInt = c 1241 v.AddArg(x) 1242 return true 1243 } 1244 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 1245 // cond: c < 32 && t.Size() == 4 1246 // result: (RORWconst [ c] x) 1247 for { 1248 t := v.Type 1249 c := v.AuxInt 1250 _ = v.Args[1] 1251 v_0 := v.Args[0] 1252 if v_0.Op != OpARM64SLLconst { 1253 break 1254 } 1255 if v_0.AuxInt != 32-c { 1256 break 1257 } 1258 x := v_0.Args[0] 1259 v_1 := v.Args[1] 1260 if v_1.Op != OpARM64MOVWUreg { 1261 break 1262 } 1263 if x != v_1.Args[0] { 1264 break 1265 } 1266 if !(c < 32 && t.Size() == 4) { 1267 break 1268 } 1269 v.reset(OpARM64RORWconst) 1270 v.AuxInt = c 1271 v.AddArg(x) 1272 return true 1273 } 1274 return false 1275 } 1276 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 1277 // match: (AND x (MOVDconst [c])) 1278 // cond: 1279 // result: (ANDconst [c] x) 1280 for { 1281 _ = v.Args[1] 1282 x := v.Args[0] 1283 v_1 := v.Args[1] 1284 if v_1.Op != OpARM64MOVDconst { 1285 break 1286 } 1287 c := v_1.AuxInt 1288 v.reset(OpARM64ANDconst) 1289 v.AuxInt = c 1290 v.AddArg(x) 1291 return true 1292 } 1293 // match: (AND (MOVDconst [c]) x) 1294 // cond: 1295 // result: (ANDconst [c] x) 1296 for { 1297 _ = v.Args[1] 1298 v_0 := v.Args[0] 1299 if v_0.Op != OpARM64MOVDconst { 1300 break 1301 } 1302 c := v_0.AuxInt 1303 x := v.Args[1] 1304 v.reset(OpARM64ANDconst) 1305 v.AuxInt = c 1306 v.AddArg(x) 
1307 return true 1308 } 1309 // match: (AND x x) 1310 // cond: 1311 // result: x 1312 for { 1313 _ = v.Args[1] 1314 x := v.Args[0] 1315 if x != v.Args[1] { 1316 break 1317 } 1318 v.reset(OpCopy) 1319 v.Type = x.Type 1320 v.AddArg(x) 1321 return true 1322 } 1323 // match: (AND x (MVN y)) 1324 // cond: 1325 // result: (BIC x y) 1326 for { 1327 _ = v.Args[1] 1328 x := v.Args[0] 1329 v_1 := v.Args[1] 1330 if v_1.Op != OpARM64MVN { 1331 break 1332 } 1333 y := v_1.Args[0] 1334 v.reset(OpARM64BIC) 1335 v.AddArg(x) 1336 v.AddArg(y) 1337 return true 1338 } 1339 // match: (AND (MVN y) x) 1340 // cond: 1341 // result: (BIC x y) 1342 for { 1343 _ = v.Args[1] 1344 v_0 := v.Args[0] 1345 if v_0.Op != OpARM64MVN { 1346 break 1347 } 1348 y := v_0.Args[0] 1349 x := v.Args[1] 1350 v.reset(OpARM64BIC) 1351 v.AddArg(x) 1352 v.AddArg(y) 1353 return true 1354 } 1355 // match: (AND x0 x1:(SLLconst [c] y)) 1356 // cond: clobberIfDead(x1) 1357 // result: (ANDshiftLL x0 y [c]) 1358 for { 1359 _ = v.Args[1] 1360 x0 := v.Args[0] 1361 x1 := v.Args[1] 1362 if x1.Op != OpARM64SLLconst { 1363 break 1364 } 1365 c := x1.AuxInt 1366 y := x1.Args[0] 1367 if !(clobberIfDead(x1)) { 1368 break 1369 } 1370 v.reset(OpARM64ANDshiftLL) 1371 v.AuxInt = c 1372 v.AddArg(x0) 1373 v.AddArg(y) 1374 return true 1375 } 1376 // match: (AND x1:(SLLconst [c] y) x0) 1377 // cond: clobberIfDead(x1) 1378 // result: (ANDshiftLL x0 y [c]) 1379 for { 1380 _ = v.Args[1] 1381 x1 := v.Args[0] 1382 if x1.Op != OpARM64SLLconst { 1383 break 1384 } 1385 c := x1.AuxInt 1386 y := x1.Args[0] 1387 x0 := v.Args[1] 1388 if !(clobberIfDead(x1)) { 1389 break 1390 } 1391 v.reset(OpARM64ANDshiftLL) 1392 v.AuxInt = c 1393 v.AddArg(x0) 1394 v.AddArg(y) 1395 return true 1396 } 1397 // match: (AND x0 x1:(SRLconst [c] y)) 1398 // cond: clobberIfDead(x1) 1399 // result: (ANDshiftRL x0 y [c]) 1400 for { 1401 _ = v.Args[1] 1402 x0 := v.Args[0] 1403 x1 := v.Args[1] 1404 if x1.Op != OpARM64SRLconst { 1405 break 1406 } 1407 c := x1.AuxInt 1408 y := 
x1.Args[0] 1409 if !(clobberIfDead(x1)) { 1410 break 1411 } 1412 v.reset(OpARM64ANDshiftRL) 1413 v.AuxInt = c 1414 v.AddArg(x0) 1415 v.AddArg(y) 1416 return true 1417 } 1418 // match: (AND x1:(SRLconst [c] y) x0) 1419 // cond: clobberIfDead(x1) 1420 // result: (ANDshiftRL x0 y [c]) 1421 for { 1422 _ = v.Args[1] 1423 x1 := v.Args[0] 1424 if x1.Op != OpARM64SRLconst { 1425 break 1426 } 1427 c := x1.AuxInt 1428 y := x1.Args[0] 1429 x0 := v.Args[1] 1430 if !(clobberIfDead(x1)) { 1431 break 1432 } 1433 v.reset(OpARM64ANDshiftRL) 1434 v.AuxInt = c 1435 v.AddArg(x0) 1436 v.AddArg(y) 1437 return true 1438 } 1439 // match: (AND x0 x1:(SRAconst [c] y)) 1440 // cond: clobberIfDead(x1) 1441 // result: (ANDshiftRA x0 y [c]) 1442 for { 1443 _ = v.Args[1] 1444 x0 := v.Args[0] 1445 x1 := v.Args[1] 1446 if x1.Op != OpARM64SRAconst { 1447 break 1448 } 1449 c := x1.AuxInt 1450 y := x1.Args[0] 1451 if !(clobberIfDead(x1)) { 1452 break 1453 } 1454 v.reset(OpARM64ANDshiftRA) 1455 v.AuxInt = c 1456 v.AddArg(x0) 1457 v.AddArg(y) 1458 return true 1459 } 1460 return false 1461 } 1462 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 1463 // match: (AND x1:(SRAconst [c] y) x0) 1464 // cond: clobberIfDead(x1) 1465 // result: (ANDshiftRA x0 y [c]) 1466 for { 1467 _ = v.Args[1] 1468 x1 := v.Args[0] 1469 if x1.Op != OpARM64SRAconst { 1470 break 1471 } 1472 c := x1.AuxInt 1473 y := x1.Args[0] 1474 x0 := v.Args[1] 1475 if !(clobberIfDead(x1)) { 1476 break 1477 } 1478 v.reset(OpARM64ANDshiftRA) 1479 v.AuxInt = c 1480 v.AddArg(x0) 1481 v.AddArg(y) 1482 return true 1483 } 1484 return false 1485 } 1486 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 1487 // match: (ANDconst [0] _) 1488 // cond: 1489 // result: (MOVDconst [0]) 1490 for { 1491 if v.AuxInt != 0 { 1492 break 1493 } 1494 v.reset(OpARM64MOVDconst) 1495 v.AuxInt = 0 1496 return true 1497 } 1498 // match: (ANDconst [-1] x) 1499 // cond: 1500 // result: x 1501 for { 1502 if v.AuxInt != -1 { 1503 break 1504 } 1505 x := v.Args[0] 
1506 v.reset(OpCopy) 1507 v.Type = x.Type 1508 v.AddArg(x) 1509 return true 1510 } 1511 // match: (ANDconst [c] (MOVDconst [d])) 1512 // cond: 1513 // result: (MOVDconst [c&d]) 1514 for { 1515 c := v.AuxInt 1516 v_0 := v.Args[0] 1517 if v_0.Op != OpARM64MOVDconst { 1518 break 1519 } 1520 d := v_0.AuxInt 1521 v.reset(OpARM64MOVDconst) 1522 v.AuxInt = c & d 1523 return true 1524 } 1525 // match: (ANDconst [c] (ANDconst [d] x)) 1526 // cond: 1527 // result: (ANDconst [c&d] x) 1528 for { 1529 c := v.AuxInt 1530 v_0 := v.Args[0] 1531 if v_0.Op != OpARM64ANDconst { 1532 break 1533 } 1534 d := v_0.AuxInt 1535 x := v_0.Args[0] 1536 v.reset(OpARM64ANDconst) 1537 v.AuxInt = c & d 1538 v.AddArg(x) 1539 return true 1540 } 1541 return false 1542 } 1543 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 1544 b := v.Block 1545 _ = b 1546 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 1547 // cond: 1548 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 1549 for { 1550 d := v.AuxInt 1551 _ = v.Args[1] 1552 v_0 := v.Args[0] 1553 if v_0.Op != OpARM64MOVDconst { 1554 break 1555 } 1556 c := v_0.AuxInt 1557 x := v.Args[1] 1558 v.reset(OpARM64ANDconst) 1559 v.AuxInt = c 1560 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1561 v0.AuxInt = d 1562 v0.AddArg(x) 1563 v.AddArg(v0) 1564 return true 1565 } 1566 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 1567 // cond: 1568 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 1569 for { 1570 d := v.AuxInt 1571 _ = v.Args[1] 1572 x := v.Args[0] 1573 v_1 := v.Args[1] 1574 if v_1.Op != OpARM64MOVDconst { 1575 break 1576 } 1577 c := v_1.AuxInt 1578 v.reset(OpARM64ANDconst) 1579 v.AuxInt = int64(uint64(c) << uint64(d)) 1580 v.AddArg(x) 1581 return true 1582 } 1583 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 1584 // cond: c==d 1585 // result: y 1586 for { 1587 d := v.AuxInt 1588 _ = v.Args[1] 1589 x := v.Args[0] 1590 y := v.Args[1] 1591 if y.Op != OpARM64SLLconst { 1592 break 1593 } 1594 c := y.AuxInt 1595 if x != y.Args[0] { 1596 
break 1597 } 1598 if !(c == d) { 1599 break 1600 } 1601 v.reset(OpCopy) 1602 v.Type = y.Type 1603 v.AddArg(y) 1604 return true 1605 } 1606 return false 1607 } 1608 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool { 1609 b := v.Block 1610 _ = b 1611 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 1612 // cond: 1613 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 1614 for { 1615 d := v.AuxInt 1616 _ = v.Args[1] 1617 v_0 := v.Args[0] 1618 if v_0.Op != OpARM64MOVDconst { 1619 break 1620 } 1621 c := v_0.AuxInt 1622 x := v.Args[1] 1623 v.reset(OpARM64ANDconst) 1624 v.AuxInt = c 1625 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1626 v0.AuxInt = d 1627 v0.AddArg(x) 1628 v.AddArg(v0) 1629 return true 1630 } 1631 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 1632 // cond: 1633 // result: (ANDconst x [int64(int64(c)>>uint64(d))]) 1634 for { 1635 d := v.AuxInt 1636 _ = v.Args[1] 1637 x := v.Args[0] 1638 v_1 := v.Args[1] 1639 if v_1.Op != OpARM64MOVDconst { 1640 break 1641 } 1642 c := v_1.AuxInt 1643 v.reset(OpARM64ANDconst) 1644 v.AuxInt = int64(int64(c) >> uint64(d)) 1645 v.AddArg(x) 1646 return true 1647 } 1648 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 1649 // cond: c==d 1650 // result: y 1651 for { 1652 d := v.AuxInt 1653 _ = v.Args[1] 1654 x := v.Args[0] 1655 y := v.Args[1] 1656 if y.Op != OpARM64SRAconst { 1657 break 1658 } 1659 c := y.AuxInt 1660 if x != y.Args[0] { 1661 break 1662 } 1663 if !(c == d) { 1664 break 1665 } 1666 v.reset(OpCopy) 1667 v.Type = y.Type 1668 v.AddArg(y) 1669 return true 1670 } 1671 return false 1672 } 1673 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool { 1674 b := v.Block 1675 _ = b 1676 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 1677 // cond: 1678 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 1679 for { 1680 d := v.AuxInt 1681 _ = v.Args[1] 1682 v_0 := v.Args[0] 1683 if v_0.Op != OpARM64MOVDconst { 1684 break 1685 } 1686 c := v_0.AuxInt 1687 x := v.Args[1] 1688 v.reset(OpARM64ANDconst) 1689 v.AuxInt = c 
1690 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1691 v0.AuxInt = d 1692 v0.AddArg(x) 1693 v.AddArg(v0) 1694 return true 1695 } 1696 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 1697 // cond: 1698 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 1699 for { 1700 d := v.AuxInt 1701 _ = v.Args[1] 1702 x := v.Args[0] 1703 v_1 := v.Args[1] 1704 if v_1.Op != OpARM64MOVDconst { 1705 break 1706 } 1707 c := v_1.AuxInt 1708 v.reset(OpARM64ANDconst) 1709 v.AuxInt = int64(uint64(c) >> uint64(d)) 1710 v.AddArg(x) 1711 return true 1712 } 1713 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 1714 // cond: c==d 1715 // result: y 1716 for { 1717 d := v.AuxInt 1718 _ = v.Args[1] 1719 x := v.Args[0] 1720 y := v.Args[1] 1721 if y.Op != OpARM64SRLconst { 1722 break 1723 } 1724 c := y.AuxInt 1725 if x != y.Args[0] { 1726 break 1727 } 1728 if !(c == d) { 1729 break 1730 } 1731 v.reset(OpCopy) 1732 v.Type = y.Type 1733 v.AddArg(y) 1734 return true 1735 } 1736 return false 1737 } 1738 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 1739 // match: (BIC x (MOVDconst [c])) 1740 // cond: 1741 // result: (BICconst [c] x) 1742 for { 1743 _ = v.Args[1] 1744 x := v.Args[0] 1745 v_1 := v.Args[1] 1746 if v_1.Op != OpARM64MOVDconst { 1747 break 1748 } 1749 c := v_1.AuxInt 1750 v.reset(OpARM64BICconst) 1751 v.AuxInt = c 1752 v.AddArg(x) 1753 return true 1754 } 1755 // match: (BIC x x) 1756 // cond: 1757 // result: (MOVDconst [0]) 1758 for { 1759 _ = v.Args[1] 1760 x := v.Args[0] 1761 if x != v.Args[1] { 1762 break 1763 } 1764 v.reset(OpARM64MOVDconst) 1765 v.AuxInt = 0 1766 return true 1767 } 1768 // match: (BIC x0 x1:(SLLconst [c] y)) 1769 // cond: clobberIfDead(x1) 1770 // result: (BICshiftLL x0 y [c]) 1771 for { 1772 _ = v.Args[1] 1773 x0 := v.Args[0] 1774 x1 := v.Args[1] 1775 if x1.Op != OpARM64SLLconst { 1776 break 1777 } 1778 c := x1.AuxInt 1779 y := x1.Args[0] 1780 if !(clobberIfDead(x1)) { 1781 break 1782 } 1783 v.reset(OpARM64BICshiftLL) 1784 v.AuxInt = c 1785 v.AddArg(x0) 1786 
v.AddArg(y) 1787 return true 1788 } 1789 // match: (BIC x0 x1:(SRLconst [c] y)) 1790 // cond: clobberIfDead(x1) 1791 // result: (BICshiftRL x0 y [c]) 1792 for { 1793 _ = v.Args[1] 1794 x0 := v.Args[0] 1795 x1 := v.Args[1] 1796 if x1.Op != OpARM64SRLconst { 1797 break 1798 } 1799 c := x1.AuxInt 1800 y := x1.Args[0] 1801 if !(clobberIfDead(x1)) { 1802 break 1803 } 1804 v.reset(OpARM64BICshiftRL) 1805 v.AuxInt = c 1806 v.AddArg(x0) 1807 v.AddArg(y) 1808 return true 1809 } 1810 // match: (BIC x0 x1:(SRAconst [c] y)) 1811 // cond: clobberIfDead(x1) 1812 // result: (BICshiftRA x0 y [c]) 1813 for { 1814 _ = v.Args[1] 1815 x0 := v.Args[0] 1816 x1 := v.Args[1] 1817 if x1.Op != OpARM64SRAconst { 1818 break 1819 } 1820 c := x1.AuxInt 1821 y := x1.Args[0] 1822 if !(clobberIfDead(x1)) { 1823 break 1824 } 1825 v.reset(OpARM64BICshiftRA) 1826 v.AuxInt = c 1827 v.AddArg(x0) 1828 v.AddArg(y) 1829 return true 1830 } 1831 return false 1832 } 1833 func rewriteValueARM64_OpARM64BICconst_0(v *Value) bool { 1834 // match: (BICconst [0] x) 1835 // cond: 1836 // result: x 1837 for { 1838 if v.AuxInt != 0 { 1839 break 1840 } 1841 x := v.Args[0] 1842 v.reset(OpCopy) 1843 v.Type = x.Type 1844 v.AddArg(x) 1845 return true 1846 } 1847 // match: (BICconst [-1] _) 1848 // cond: 1849 // result: (MOVDconst [0]) 1850 for { 1851 if v.AuxInt != -1 { 1852 break 1853 } 1854 v.reset(OpARM64MOVDconst) 1855 v.AuxInt = 0 1856 return true 1857 } 1858 // match: (BICconst [c] (MOVDconst [d])) 1859 // cond: 1860 // result: (MOVDconst [d&^c]) 1861 for { 1862 c := v.AuxInt 1863 v_0 := v.Args[0] 1864 if v_0.Op != OpARM64MOVDconst { 1865 break 1866 } 1867 d := v_0.AuxInt 1868 v.reset(OpARM64MOVDconst) 1869 v.AuxInt = d &^ c 1870 return true 1871 } 1872 return false 1873 } 1874 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 1875 // match: (BICshiftLL x (MOVDconst [c]) [d]) 1876 // cond: 1877 // result: (BICconst x [int64(uint64(c)<<uint64(d))]) 1878 for { 1879 d := v.AuxInt 1880 _ = v.Args[1] 1881 x := 
v.Args[0] 1882 v_1 := v.Args[1] 1883 if v_1.Op != OpARM64MOVDconst { 1884 break 1885 } 1886 c := v_1.AuxInt 1887 v.reset(OpARM64BICconst) 1888 v.AuxInt = int64(uint64(c) << uint64(d)) 1889 v.AddArg(x) 1890 return true 1891 } 1892 // match: (BICshiftLL x (SLLconst x [c]) [d]) 1893 // cond: c==d 1894 // result: (MOVDconst [0]) 1895 for { 1896 d := v.AuxInt 1897 _ = v.Args[1] 1898 x := v.Args[0] 1899 v_1 := v.Args[1] 1900 if v_1.Op != OpARM64SLLconst { 1901 break 1902 } 1903 c := v_1.AuxInt 1904 if x != v_1.Args[0] { 1905 break 1906 } 1907 if !(c == d) { 1908 break 1909 } 1910 v.reset(OpARM64MOVDconst) 1911 v.AuxInt = 0 1912 return true 1913 } 1914 return false 1915 } 1916 func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool { 1917 // match: (BICshiftRA x (MOVDconst [c]) [d]) 1918 // cond: 1919 // result: (BICconst x [int64(int64(c)>>uint64(d))]) 1920 for { 1921 d := v.AuxInt 1922 _ = v.Args[1] 1923 x := v.Args[0] 1924 v_1 := v.Args[1] 1925 if v_1.Op != OpARM64MOVDconst { 1926 break 1927 } 1928 c := v_1.AuxInt 1929 v.reset(OpARM64BICconst) 1930 v.AuxInt = int64(int64(c) >> uint64(d)) 1931 v.AddArg(x) 1932 return true 1933 } 1934 // match: (BICshiftRA x (SRAconst x [c]) [d]) 1935 // cond: c==d 1936 // result: (MOVDconst [0]) 1937 for { 1938 d := v.AuxInt 1939 _ = v.Args[1] 1940 x := v.Args[0] 1941 v_1 := v.Args[1] 1942 if v_1.Op != OpARM64SRAconst { 1943 break 1944 } 1945 c := v_1.AuxInt 1946 if x != v_1.Args[0] { 1947 break 1948 } 1949 if !(c == d) { 1950 break 1951 } 1952 v.reset(OpARM64MOVDconst) 1953 v.AuxInt = 0 1954 return true 1955 } 1956 return false 1957 } 1958 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool { 1959 // match: (BICshiftRL x (MOVDconst [c]) [d]) 1960 // cond: 1961 // result: (BICconst x [int64(uint64(c)>>uint64(d))]) 1962 for { 1963 d := v.AuxInt 1964 _ = v.Args[1] 1965 x := v.Args[0] 1966 v_1 := v.Args[1] 1967 if v_1.Op != OpARM64MOVDconst { 1968 break 1969 } 1970 c := v_1.AuxInt 1971 v.reset(OpARM64BICconst) 1972 v.AuxInt = 
int64(uint64(c) >> uint64(d)) 1973 v.AddArg(x) 1974 return true 1975 } 1976 // match: (BICshiftRL x (SRLconst x [c]) [d]) 1977 // cond: c==d 1978 // result: (MOVDconst [0]) 1979 for { 1980 d := v.AuxInt 1981 _ = v.Args[1] 1982 x := v.Args[0] 1983 v_1 := v.Args[1] 1984 if v_1.Op != OpARM64SRLconst { 1985 break 1986 } 1987 c := v_1.AuxInt 1988 if x != v_1.Args[0] { 1989 break 1990 } 1991 if !(c == d) { 1992 break 1993 } 1994 v.reset(OpARM64MOVDconst) 1995 v.AuxInt = 0 1996 return true 1997 } 1998 return false 1999 } 2000 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 2001 b := v.Block 2002 _ = b 2003 // match: (CMP x (MOVDconst [c])) 2004 // cond: 2005 // result: (CMPconst [c] x) 2006 for { 2007 _ = v.Args[1] 2008 x := v.Args[0] 2009 v_1 := v.Args[1] 2010 if v_1.Op != OpARM64MOVDconst { 2011 break 2012 } 2013 c := v_1.AuxInt 2014 v.reset(OpARM64CMPconst) 2015 v.AuxInt = c 2016 v.AddArg(x) 2017 return true 2018 } 2019 // match: (CMP (MOVDconst [c]) x) 2020 // cond: 2021 // result: (InvertFlags (CMPconst [c] x)) 2022 for { 2023 _ = v.Args[1] 2024 v_0 := v.Args[0] 2025 if v_0.Op != OpARM64MOVDconst { 2026 break 2027 } 2028 c := v_0.AuxInt 2029 x := v.Args[1] 2030 v.reset(OpARM64InvertFlags) 2031 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2032 v0.AuxInt = c 2033 v0.AddArg(x) 2034 v.AddArg(v0) 2035 return true 2036 } 2037 // match: (CMP x0 x1:(SLLconst [c] y)) 2038 // cond: clobberIfDead(x1) 2039 // result: (CMPshiftLL x0 y [c]) 2040 for { 2041 _ = v.Args[1] 2042 x0 := v.Args[0] 2043 x1 := v.Args[1] 2044 if x1.Op != OpARM64SLLconst { 2045 break 2046 } 2047 c := x1.AuxInt 2048 y := x1.Args[0] 2049 if !(clobberIfDead(x1)) { 2050 break 2051 } 2052 v.reset(OpARM64CMPshiftLL) 2053 v.AuxInt = c 2054 v.AddArg(x0) 2055 v.AddArg(y) 2056 return true 2057 } 2058 // match: (CMP x0:(SLLconst [c] y) x1) 2059 // cond: clobberIfDead(x0) 2060 // result: (InvertFlags (CMPshiftLL x1 y [c])) 2061 for { 2062 _ = v.Args[1] 2063 x0 := v.Args[0] 2064 if x0.Op != 
OpARM64SLLconst { 2065 break 2066 } 2067 c := x0.AuxInt 2068 y := x0.Args[0] 2069 x1 := v.Args[1] 2070 if !(clobberIfDead(x0)) { 2071 break 2072 } 2073 v.reset(OpARM64InvertFlags) 2074 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 2075 v0.AuxInt = c 2076 v0.AddArg(x1) 2077 v0.AddArg(y) 2078 v.AddArg(v0) 2079 return true 2080 } 2081 // match: (CMP x0 x1:(SRLconst [c] y)) 2082 // cond: clobberIfDead(x1) 2083 // result: (CMPshiftRL x0 y [c]) 2084 for { 2085 _ = v.Args[1] 2086 x0 := v.Args[0] 2087 x1 := v.Args[1] 2088 if x1.Op != OpARM64SRLconst { 2089 break 2090 } 2091 c := x1.AuxInt 2092 y := x1.Args[0] 2093 if !(clobberIfDead(x1)) { 2094 break 2095 } 2096 v.reset(OpARM64CMPshiftRL) 2097 v.AuxInt = c 2098 v.AddArg(x0) 2099 v.AddArg(y) 2100 return true 2101 } 2102 // match: (CMP x0:(SRLconst [c] y) x1) 2103 // cond: clobberIfDead(x0) 2104 // result: (InvertFlags (CMPshiftRL x1 y [c])) 2105 for { 2106 _ = v.Args[1] 2107 x0 := v.Args[0] 2108 if x0.Op != OpARM64SRLconst { 2109 break 2110 } 2111 c := x0.AuxInt 2112 y := x0.Args[0] 2113 x1 := v.Args[1] 2114 if !(clobberIfDead(x0)) { 2115 break 2116 } 2117 v.reset(OpARM64InvertFlags) 2118 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 2119 v0.AuxInt = c 2120 v0.AddArg(x1) 2121 v0.AddArg(y) 2122 v.AddArg(v0) 2123 return true 2124 } 2125 // match: (CMP x0 x1:(SRAconst [c] y)) 2126 // cond: clobberIfDead(x1) 2127 // result: (CMPshiftRA x0 y [c]) 2128 for { 2129 _ = v.Args[1] 2130 x0 := v.Args[0] 2131 x1 := v.Args[1] 2132 if x1.Op != OpARM64SRAconst { 2133 break 2134 } 2135 c := x1.AuxInt 2136 y := x1.Args[0] 2137 if !(clobberIfDead(x1)) { 2138 break 2139 } 2140 v.reset(OpARM64CMPshiftRA) 2141 v.AuxInt = c 2142 v.AddArg(x0) 2143 v.AddArg(y) 2144 return true 2145 } 2146 // match: (CMP x0:(SRAconst [c] y) x1) 2147 // cond: clobberIfDead(x0) 2148 // result: (InvertFlags (CMPshiftRA x1 y [c])) 2149 for { 2150 _ = v.Args[1] 2151 x0 := v.Args[0] 2152 if x0.Op != OpARM64SRAconst { 2153 break 2154 } 2155 c 
:= x0.AuxInt 2156 y := x0.Args[0] 2157 x1 := v.Args[1] 2158 if !(clobberIfDead(x0)) { 2159 break 2160 } 2161 v.reset(OpARM64InvertFlags) 2162 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 2163 v0.AuxInt = c 2164 v0.AddArg(x1) 2165 v0.AddArg(y) 2166 v.AddArg(v0) 2167 return true 2168 } 2169 return false 2170 } 2171 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool { 2172 b := v.Block 2173 _ = b 2174 // match: (CMPW x (MOVDconst [c])) 2175 // cond: 2176 // result: (CMPWconst [int64(int32(c))] x) 2177 for { 2178 _ = v.Args[1] 2179 x := v.Args[0] 2180 v_1 := v.Args[1] 2181 if v_1.Op != OpARM64MOVDconst { 2182 break 2183 } 2184 c := v_1.AuxInt 2185 v.reset(OpARM64CMPWconst) 2186 v.AuxInt = int64(int32(c)) 2187 v.AddArg(x) 2188 return true 2189 } 2190 // match: (CMPW (MOVDconst [c]) x) 2191 // cond: 2192 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 2193 for { 2194 _ = v.Args[1] 2195 v_0 := v.Args[0] 2196 if v_0.Op != OpARM64MOVDconst { 2197 break 2198 } 2199 c := v_0.AuxInt 2200 x := v.Args[1] 2201 v.reset(OpARM64InvertFlags) 2202 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 2203 v0.AuxInt = int64(int32(c)) 2204 v0.AddArg(x) 2205 v.AddArg(v0) 2206 return true 2207 } 2208 return false 2209 } 2210 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool { 2211 // match: (CMPWconst (MOVDconst [x]) [y]) 2212 // cond: int32(x)==int32(y) 2213 // result: (FlagEQ) 2214 for { 2215 y := v.AuxInt 2216 v_0 := v.Args[0] 2217 if v_0.Op != OpARM64MOVDconst { 2218 break 2219 } 2220 x := v_0.AuxInt 2221 if !(int32(x) == int32(y)) { 2222 break 2223 } 2224 v.reset(OpARM64FlagEQ) 2225 return true 2226 } 2227 // match: (CMPWconst (MOVDconst [x]) [y]) 2228 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 2229 // result: (FlagLT_ULT) 2230 for { 2231 y := v.AuxInt 2232 v_0 := v.Args[0] 2233 if v_0.Op != OpARM64MOVDconst { 2234 break 2235 } 2236 x := v_0.AuxInt 2237 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 2238 break 2239 } 2240 
v.reset(OpARM64FlagLT_ULT) 2241 return true 2242 } 2243 // match: (CMPWconst (MOVDconst [x]) [y]) 2244 // cond: int32(x)<int32(y) && uint32(x)>uint32(y) 2245 // result: (FlagLT_UGT) 2246 for { 2247 y := v.AuxInt 2248 v_0 := v.Args[0] 2249 if v_0.Op != OpARM64MOVDconst { 2250 break 2251 } 2252 x := v_0.AuxInt 2253 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 2254 break 2255 } 2256 v.reset(OpARM64FlagLT_UGT) 2257 return true 2258 } 2259 // match: (CMPWconst (MOVDconst [x]) [y]) 2260 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 2261 // result: (FlagGT_ULT) 2262 for { 2263 y := v.AuxInt 2264 v_0 := v.Args[0] 2265 if v_0.Op != OpARM64MOVDconst { 2266 break 2267 } 2268 x := v_0.AuxInt 2269 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { 2270 break 2271 } 2272 v.reset(OpARM64FlagGT_ULT) 2273 return true 2274 } 2275 // match: (CMPWconst (MOVDconst [x]) [y]) 2276 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 2277 // result: (FlagGT_UGT) 2278 for { 2279 y := v.AuxInt 2280 v_0 := v.Args[0] 2281 if v_0.Op != OpARM64MOVDconst { 2282 break 2283 } 2284 x := v_0.AuxInt 2285 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { 2286 break 2287 } 2288 v.reset(OpARM64FlagGT_UGT) 2289 return true 2290 } 2291 // match: (CMPWconst (MOVBUreg _) [c]) 2292 // cond: 0xff < int32(c) 2293 // result: (FlagLT_ULT) 2294 for { 2295 c := v.AuxInt 2296 v_0 := v.Args[0] 2297 if v_0.Op != OpARM64MOVBUreg { 2298 break 2299 } 2300 if !(0xff < int32(c)) { 2301 break 2302 } 2303 v.reset(OpARM64FlagLT_ULT) 2304 return true 2305 } 2306 // match: (CMPWconst (MOVHUreg _) [c]) 2307 // cond: 0xffff < int32(c) 2308 // result: (FlagLT_ULT) 2309 for { 2310 c := v.AuxInt 2311 v_0 := v.Args[0] 2312 if v_0.Op != OpARM64MOVHUreg { 2313 break 2314 } 2315 if !(0xffff < int32(c)) { 2316 break 2317 } 2318 v.reset(OpARM64FlagLT_ULT) 2319 return true 2320 } 2321 return false 2322 } 2323 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool { 2324 // match: (CMPconst (MOVDconst [x]) [y]) 2325 // cond: 
x==y 2326 // result: (FlagEQ) 2327 for { 2328 y := v.AuxInt 2329 v_0 := v.Args[0] 2330 if v_0.Op != OpARM64MOVDconst { 2331 break 2332 } 2333 x := v_0.AuxInt 2334 if !(x == y) { 2335 break 2336 } 2337 v.reset(OpARM64FlagEQ) 2338 return true 2339 } 2340 // match: (CMPconst (MOVDconst [x]) [y]) 2341 // cond: int64(x)<int64(y) && uint64(x)<uint64(y) 2342 // result: (FlagLT_ULT) 2343 for { 2344 y := v.AuxInt 2345 v_0 := v.Args[0] 2346 if v_0.Op != OpARM64MOVDconst { 2347 break 2348 } 2349 x := v_0.AuxInt 2350 if !(int64(x) < int64(y) && uint64(x) < uint64(y)) { 2351 break 2352 } 2353 v.reset(OpARM64FlagLT_ULT) 2354 return true 2355 } 2356 // match: (CMPconst (MOVDconst [x]) [y]) 2357 // cond: int64(x)<int64(y) && uint64(x)>uint64(y) 2358 // result: (FlagLT_UGT) 2359 for { 2360 y := v.AuxInt 2361 v_0 := v.Args[0] 2362 if v_0.Op != OpARM64MOVDconst { 2363 break 2364 } 2365 x := v_0.AuxInt 2366 if !(int64(x) < int64(y) && uint64(x) > uint64(y)) { 2367 break 2368 } 2369 v.reset(OpARM64FlagLT_UGT) 2370 return true 2371 } 2372 // match: (CMPconst (MOVDconst [x]) [y]) 2373 // cond: int64(x)>int64(y) && uint64(x)<uint64(y) 2374 // result: (FlagGT_ULT) 2375 for { 2376 y := v.AuxInt 2377 v_0 := v.Args[0] 2378 if v_0.Op != OpARM64MOVDconst { 2379 break 2380 } 2381 x := v_0.AuxInt 2382 if !(int64(x) > int64(y) && uint64(x) < uint64(y)) { 2383 break 2384 } 2385 v.reset(OpARM64FlagGT_ULT) 2386 return true 2387 } 2388 // match: (CMPconst (MOVDconst [x]) [y]) 2389 // cond: int64(x)>int64(y) && uint64(x)>uint64(y) 2390 // result: (FlagGT_UGT) 2391 for { 2392 y := v.AuxInt 2393 v_0 := v.Args[0] 2394 if v_0.Op != OpARM64MOVDconst { 2395 break 2396 } 2397 x := v_0.AuxInt 2398 if !(int64(x) > int64(y) && uint64(x) > uint64(y)) { 2399 break 2400 } 2401 v.reset(OpARM64FlagGT_UGT) 2402 return true 2403 } 2404 // match: (CMPconst (MOVBUreg _) [c]) 2405 // cond: 0xff < c 2406 // result: (FlagLT_ULT) 2407 for { 2408 c := v.AuxInt 2409 v_0 := v.Args[0] 2410 if v_0.Op != OpARM64MOVBUreg { 2411 
break 2412 } 2413 if !(0xff < c) { 2414 break 2415 } 2416 v.reset(OpARM64FlagLT_ULT) 2417 return true 2418 } 2419 // match: (CMPconst (MOVHUreg _) [c]) 2420 // cond: 0xffff < c 2421 // result: (FlagLT_ULT) 2422 for { 2423 c := v.AuxInt 2424 v_0 := v.Args[0] 2425 if v_0.Op != OpARM64MOVHUreg { 2426 break 2427 } 2428 if !(0xffff < c) { 2429 break 2430 } 2431 v.reset(OpARM64FlagLT_ULT) 2432 return true 2433 } 2434 // match: (CMPconst (MOVWUreg _) [c]) 2435 // cond: 0xffffffff < c 2436 // result: (FlagLT_ULT) 2437 for { 2438 c := v.AuxInt 2439 v_0 := v.Args[0] 2440 if v_0.Op != OpARM64MOVWUreg { 2441 break 2442 } 2443 if !(0xffffffff < c) { 2444 break 2445 } 2446 v.reset(OpARM64FlagLT_ULT) 2447 return true 2448 } 2449 // match: (CMPconst (ANDconst _ [m]) [n]) 2450 // cond: 0 <= m && m < n 2451 // result: (FlagLT_ULT) 2452 for { 2453 n := v.AuxInt 2454 v_0 := v.Args[0] 2455 if v_0.Op != OpARM64ANDconst { 2456 break 2457 } 2458 m := v_0.AuxInt 2459 if !(0 <= m && m < n) { 2460 break 2461 } 2462 v.reset(OpARM64FlagLT_ULT) 2463 return true 2464 } 2465 // match: (CMPconst (SRLconst _ [c]) [n]) 2466 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 2467 // result: (FlagLT_ULT) 2468 for { 2469 n := v.AuxInt 2470 v_0 := v.Args[0] 2471 if v_0.Op != OpARM64SRLconst { 2472 break 2473 } 2474 c := v_0.AuxInt 2475 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 2476 break 2477 } 2478 v.reset(OpARM64FlagLT_ULT) 2479 return true 2480 } 2481 return false 2482 } 2483 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 2484 b := v.Block 2485 _ = b 2486 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 2487 // cond: 2488 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 2489 for { 2490 d := v.AuxInt 2491 _ = v.Args[1] 2492 v_0 := v.Args[0] 2493 if v_0.Op != OpARM64MOVDconst { 2494 break 2495 } 2496 c := v_0.AuxInt 2497 x := v.Args[1] 2498 v.reset(OpARM64InvertFlags) 2499 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 
2500 v0.AuxInt = c 2501 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2502 v1.AuxInt = d 2503 v1.AddArg(x) 2504 v0.AddArg(v1) 2505 v.AddArg(v0) 2506 return true 2507 } 2508 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 2509 // cond: 2510 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 2511 for { 2512 d := v.AuxInt 2513 _ = v.Args[1] 2514 x := v.Args[0] 2515 v_1 := v.Args[1] 2516 if v_1.Op != OpARM64MOVDconst { 2517 break 2518 } 2519 c := v_1.AuxInt 2520 v.reset(OpARM64CMPconst) 2521 v.AuxInt = int64(uint64(c) << uint64(d)) 2522 v.AddArg(x) 2523 return true 2524 } 2525 return false 2526 } 2527 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 2528 b := v.Block 2529 _ = b 2530 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 2531 // cond: 2532 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 2533 for { 2534 d := v.AuxInt 2535 _ = v.Args[1] 2536 v_0 := v.Args[0] 2537 if v_0.Op != OpARM64MOVDconst { 2538 break 2539 } 2540 c := v_0.AuxInt 2541 x := v.Args[1] 2542 v.reset(OpARM64InvertFlags) 2543 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2544 v0.AuxInt = c 2545 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2546 v1.AuxInt = d 2547 v1.AddArg(x) 2548 v0.AddArg(v1) 2549 v.AddArg(v0) 2550 return true 2551 } 2552 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 2553 // cond: 2554 // result: (CMPconst x [int64(int64(c)>>uint64(d))]) 2555 for { 2556 d := v.AuxInt 2557 _ = v.Args[1] 2558 x := v.Args[0] 2559 v_1 := v.Args[1] 2560 if v_1.Op != OpARM64MOVDconst { 2561 break 2562 } 2563 c := v_1.AuxInt 2564 v.reset(OpARM64CMPconst) 2565 v.AuxInt = int64(int64(c) >> uint64(d)) 2566 v.AddArg(x) 2567 return true 2568 } 2569 return false 2570 } 2571 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 2572 b := v.Block 2573 _ = b 2574 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 2575 // cond: 2576 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 2577 for { 2578 d := v.AuxInt 2579 _ = v.Args[1] 2580 v_0 := v.Args[0] 
2581 if v_0.Op != OpARM64MOVDconst { 2582 break 2583 } 2584 c := v_0.AuxInt 2585 x := v.Args[1] 2586 v.reset(OpARM64InvertFlags) 2587 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2588 v0.AuxInt = c 2589 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2590 v1.AuxInt = d 2591 v1.AddArg(x) 2592 v0.AddArg(v1) 2593 v.AddArg(v0) 2594 return true 2595 } 2596 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 2597 // cond: 2598 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 2599 for { 2600 d := v.AuxInt 2601 _ = v.Args[1] 2602 x := v.Args[0] 2603 v_1 := v.Args[1] 2604 if v_1.Op != OpARM64MOVDconst { 2605 break 2606 } 2607 c := v_1.AuxInt 2608 v.reset(OpARM64CMPconst) 2609 v.AuxInt = int64(uint64(c) >> uint64(d)) 2610 v.AddArg(x) 2611 return true 2612 } 2613 return false 2614 } 2615 func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool { 2616 // match: (CSEL {cc} x (MOVDconst [0]) flag) 2617 // cond: 2618 // result: (CSEL0 {cc} x flag) 2619 for { 2620 cc := v.Aux 2621 _ = v.Args[2] 2622 x := v.Args[0] 2623 v_1 := v.Args[1] 2624 if v_1.Op != OpARM64MOVDconst { 2625 break 2626 } 2627 if v_1.AuxInt != 0 { 2628 break 2629 } 2630 flag := v.Args[2] 2631 v.reset(OpARM64CSEL0) 2632 v.Aux = cc 2633 v.AddArg(x) 2634 v.AddArg(flag) 2635 return true 2636 } 2637 // match: (CSEL {cc} (MOVDconst [0]) y flag) 2638 // cond: 2639 // result: (CSEL0 {arm64Negate(cc.(Op))} y flag) 2640 for { 2641 cc := v.Aux 2642 _ = v.Args[2] 2643 v_0 := v.Args[0] 2644 if v_0.Op != OpARM64MOVDconst { 2645 break 2646 } 2647 if v_0.AuxInt != 0 { 2648 break 2649 } 2650 y := v.Args[1] 2651 flag := v.Args[2] 2652 v.reset(OpARM64CSEL0) 2653 v.Aux = arm64Negate(cc.(Op)) 2654 v.AddArg(y) 2655 v.AddArg(flag) 2656 return true 2657 } 2658 // match: (CSEL {cc} x y (InvertFlags cmp)) 2659 // cond: 2660 // result: (CSEL {arm64Invert(cc.(Op))} x y cmp) 2661 for { 2662 cc := v.Aux 2663 _ = v.Args[2] 2664 x := v.Args[0] 2665 y := v.Args[1] 2666 v_2 := v.Args[2] 2667 if v_2.Op != OpARM64InvertFlags { 2668 break 
2669 } 2670 cmp := v_2.Args[0] 2671 v.reset(OpARM64CSEL) 2672 v.Aux = arm64Invert(cc.(Op)) 2673 v.AddArg(x) 2674 v.AddArg(y) 2675 v.AddArg(cmp) 2676 return true 2677 } 2678 // match: (CSEL {cc} x _ flag) 2679 // cond: ccARM64Eval(cc, flag) > 0 2680 // result: x 2681 for { 2682 cc := v.Aux 2683 _ = v.Args[2] 2684 x := v.Args[0] 2685 flag := v.Args[2] 2686 if !(ccARM64Eval(cc, flag) > 0) { 2687 break 2688 } 2689 v.reset(OpCopy) 2690 v.Type = x.Type 2691 v.AddArg(x) 2692 return true 2693 } 2694 // match: (CSEL {cc} _ y flag) 2695 // cond: ccARM64Eval(cc, flag) < 0 2696 // result: y 2697 for { 2698 cc := v.Aux 2699 _ = v.Args[2] 2700 y := v.Args[1] 2701 flag := v.Args[2] 2702 if !(ccARM64Eval(cc, flag) < 0) { 2703 break 2704 } 2705 v.reset(OpCopy) 2706 v.Type = y.Type 2707 v.AddArg(y) 2708 return true 2709 } 2710 // match: (CSEL {cc} x y (CMPWconst [0] bool)) 2711 // cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil 2712 // result: (CSEL {bool.Op} x y flagArg(bool)) 2713 for { 2714 cc := v.Aux 2715 _ = v.Args[2] 2716 x := v.Args[0] 2717 y := v.Args[1] 2718 v_2 := v.Args[2] 2719 if v_2.Op != OpARM64CMPWconst { 2720 break 2721 } 2722 if v_2.AuxInt != 0 { 2723 break 2724 } 2725 bool := v_2.Args[0] 2726 if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) { 2727 break 2728 } 2729 v.reset(OpARM64CSEL) 2730 v.Aux = bool.Op 2731 v.AddArg(x) 2732 v.AddArg(y) 2733 v.AddArg(flagArg(bool)) 2734 return true 2735 } 2736 // match: (CSEL {cc} x y (CMPWconst [0] bool)) 2737 // cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil 2738 // result: (CSEL {arm64Negate(bool.Op)} x y flagArg(bool)) 2739 for { 2740 cc := v.Aux 2741 _ = v.Args[2] 2742 x := v.Args[0] 2743 y := v.Args[1] 2744 v_2 := v.Args[2] 2745 if v_2.Op != OpARM64CMPWconst { 2746 break 2747 } 2748 if v_2.AuxInt != 0 { 2749 break 2750 } 2751 bool := v_2.Args[0] 2752 if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) { 2753 break 2754 } 2755 v.reset(OpARM64CSEL) 2756 v.Aux = arm64Negate(bool.Op) 2757 v.AddArg(x) 
2758 v.AddArg(y) 2759 v.AddArg(flagArg(bool)) 2760 return true 2761 } 2762 return false 2763 } 2764 func rewriteValueARM64_OpARM64CSEL0_0(v *Value) bool { 2765 // match: (CSEL0 {cc} x (InvertFlags cmp)) 2766 // cond: 2767 // result: (CSEL0 {arm64Invert(cc.(Op))} x cmp) 2768 for { 2769 cc := v.Aux 2770 _ = v.Args[1] 2771 x := v.Args[0] 2772 v_1 := v.Args[1] 2773 if v_1.Op != OpARM64InvertFlags { 2774 break 2775 } 2776 cmp := v_1.Args[0] 2777 v.reset(OpARM64CSEL0) 2778 v.Aux = arm64Invert(cc.(Op)) 2779 v.AddArg(x) 2780 v.AddArg(cmp) 2781 return true 2782 } 2783 // match: (CSEL0 {cc} x flag) 2784 // cond: ccARM64Eval(cc, flag) > 0 2785 // result: x 2786 for { 2787 cc := v.Aux 2788 _ = v.Args[1] 2789 x := v.Args[0] 2790 flag := v.Args[1] 2791 if !(ccARM64Eval(cc, flag) > 0) { 2792 break 2793 } 2794 v.reset(OpCopy) 2795 v.Type = x.Type 2796 v.AddArg(x) 2797 return true 2798 } 2799 // match: (CSEL0 {cc} _ flag) 2800 // cond: ccARM64Eval(cc, flag) < 0 2801 // result: (MOVDconst [0]) 2802 for { 2803 cc := v.Aux 2804 _ = v.Args[1] 2805 flag := v.Args[1] 2806 if !(ccARM64Eval(cc, flag) < 0) { 2807 break 2808 } 2809 v.reset(OpARM64MOVDconst) 2810 v.AuxInt = 0 2811 return true 2812 } 2813 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 2814 // cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil 2815 // result: (CSEL0 {bool.Op} x flagArg(bool)) 2816 for { 2817 cc := v.Aux 2818 _ = v.Args[1] 2819 x := v.Args[0] 2820 v_1 := v.Args[1] 2821 if v_1.Op != OpARM64CMPWconst { 2822 break 2823 } 2824 if v_1.AuxInt != 0 { 2825 break 2826 } 2827 bool := v_1.Args[0] 2828 if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) { 2829 break 2830 } 2831 v.reset(OpARM64CSEL0) 2832 v.Aux = bool.Op 2833 v.AddArg(x) 2834 v.AddArg(flagArg(bool)) 2835 return true 2836 } 2837 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 2838 // cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil 2839 // result: (CSEL0 {arm64Negate(bool.Op)} x flagArg(bool)) 2840 for { 2841 cc := v.Aux 2842 _ = v.Args[1] 2843 x 
:= v.Args[0] 2844 v_1 := v.Args[1] 2845 if v_1.Op != OpARM64CMPWconst { 2846 break 2847 } 2848 if v_1.AuxInt != 0 { 2849 break 2850 } 2851 bool := v_1.Args[0] 2852 if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) { 2853 break 2854 } 2855 v.reset(OpARM64CSEL0) 2856 v.Aux = arm64Negate(bool.Op) 2857 v.AddArg(x) 2858 v.AddArg(flagArg(bool)) 2859 return true 2860 } 2861 return false 2862 } 2863 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 2864 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 2865 // cond: 2866 // result: (MOVDconst [int64(c)/int64(d)]) 2867 for { 2868 _ = v.Args[1] 2869 v_0 := v.Args[0] 2870 if v_0.Op != OpARM64MOVDconst { 2871 break 2872 } 2873 c := v_0.AuxInt 2874 v_1 := v.Args[1] 2875 if v_1.Op != OpARM64MOVDconst { 2876 break 2877 } 2878 d := v_1.AuxInt 2879 v.reset(OpARM64MOVDconst) 2880 v.AuxInt = int64(c) / int64(d) 2881 return true 2882 } 2883 return false 2884 } 2885 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 2886 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 2887 // cond: 2888 // result: (MOVDconst [int64(int32(c)/int32(d))]) 2889 for { 2890 _ = v.Args[1] 2891 v_0 := v.Args[0] 2892 if v_0.Op != OpARM64MOVDconst { 2893 break 2894 } 2895 c := v_0.AuxInt 2896 v_1 := v.Args[1] 2897 if v_1.Op != OpARM64MOVDconst { 2898 break 2899 } 2900 d := v_1.AuxInt 2901 v.reset(OpARM64MOVDconst) 2902 v.AuxInt = int64(int32(c) / int32(d)) 2903 return true 2904 } 2905 return false 2906 } 2907 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 2908 // match: (Equal (FlagEQ)) 2909 // cond: 2910 // result: (MOVDconst [1]) 2911 for { 2912 v_0 := v.Args[0] 2913 if v_0.Op != OpARM64FlagEQ { 2914 break 2915 } 2916 v.reset(OpARM64MOVDconst) 2917 v.AuxInt = 1 2918 return true 2919 } 2920 // match: (Equal (FlagLT_ULT)) 2921 // cond: 2922 // result: (MOVDconst [0]) 2923 for { 2924 v_0 := v.Args[0] 2925 if v_0.Op != OpARM64FlagLT_ULT { 2926 break 2927 } 2928 v.reset(OpARM64MOVDconst) 2929 v.AuxInt = 0 2930 return true 2931 } 2932 // match: 
(Equal (FlagLT_UGT)) 2933 // cond: 2934 // result: (MOVDconst [0]) 2935 for { 2936 v_0 := v.Args[0] 2937 if v_0.Op != OpARM64FlagLT_UGT { 2938 break 2939 } 2940 v.reset(OpARM64MOVDconst) 2941 v.AuxInt = 0 2942 return true 2943 } 2944 // match: (Equal (FlagGT_ULT)) 2945 // cond: 2946 // result: (MOVDconst [0]) 2947 for { 2948 v_0 := v.Args[0] 2949 if v_0.Op != OpARM64FlagGT_ULT { 2950 break 2951 } 2952 v.reset(OpARM64MOVDconst) 2953 v.AuxInt = 0 2954 return true 2955 } 2956 // match: (Equal (FlagGT_UGT)) 2957 // cond: 2958 // result: (MOVDconst [0]) 2959 for { 2960 v_0 := v.Args[0] 2961 if v_0.Op != OpARM64FlagGT_UGT { 2962 break 2963 } 2964 v.reset(OpARM64MOVDconst) 2965 v.AuxInt = 0 2966 return true 2967 } 2968 // match: (Equal (InvertFlags x)) 2969 // cond: 2970 // result: (Equal x) 2971 for { 2972 v_0 := v.Args[0] 2973 if v_0.Op != OpARM64InvertFlags { 2974 break 2975 } 2976 x := v_0.Args[0] 2977 v.reset(OpARM64Equal) 2978 v.AddArg(x) 2979 return true 2980 } 2981 return false 2982 } 2983 func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool { 2984 // match: (FADDD a (FMULD x y)) 2985 // cond: 2986 // result: (FMADDD a x y) 2987 for { 2988 _ = v.Args[1] 2989 a := v.Args[0] 2990 v_1 := v.Args[1] 2991 if v_1.Op != OpARM64FMULD { 2992 break 2993 } 2994 _ = v_1.Args[1] 2995 x := v_1.Args[0] 2996 y := v_1.Args[1] 2997 v.reset(OpARM64FMADDD) 2998 v.AddArg(a) 2999 v.AddArg(x) 3000 v.AddArg(y) 3001 return true 3002 } 3003 // match: (FADDD (FMULD x y) a) 3004 // cond: 3005 // result: (FMADDD a x y) 3006 for { 3007 _ = v.Args[1] 3008 v_0 := v.Args[0] 3009 if v_0.Op != OpARM64FMULD { 3010 break 3011 } 3012 _ = v_0.Args[1] 3013 x := v_0.Args[0] 3014 y := v_0.Args[1] 3015 a := v.Args[1] 3016 v.reset(OpARM64FMADDD) 3017 v.AddArg(a) 3018 v.AddArg(x) 3019 v.AddArg(y) 3020 return true 3021 } 3022 // match: (FADDD a (FNMULD x y)) 3023 // cond: 3024 // result: (FMSUBD a x y) 3025 for { 3026 _ = v.Args[1] 3027 a := v.Args[0] 3028 v_1 := v.Args[1] 3029 if v_1.Op != OpARM64FNMULD { 
3030 break 3031 } 3032 _ = v_1.Args[1] 3033 x := v_1.Args[0] 3034 y := v_1.Args[1] 3035 v.reset(OpARM64FMSUBD) 3036 v.AddArg(a) 3037 v.AddArg(x) 3038 v.AddArg(y) 3039 return true 3040 } 3041 // match: (FADDD (FNMULD x y) a) 3042 // cond: 3043 // result: (FMSUBD a x y) 3044 for { 3045 _ = v.Args[1] 3046 v_0 := v.Args[0] 3047 if v_0.Op != OpARM64FNMULD { 3048 break 3049 } 3050 _ = v_0.Args[1] 3051 x := v_0.Args[0] 3052 y := v_0.Args[1] 3053 a := v.Args[1] 3054 v.reset(OpARM64FMSUBD) 3055 v.AddArg(a) 3056 v.AddArg(x) 3057 v.AddArg(y) 3058 return true 3059 } 3060 return false 3061 } 3062 func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool { 3063 // match: (FADDS a (FMULS x y)) 3064 // cond: 3065 // result: (FMADDS a x y) 3066 for { 3067 _ = v.Args[1] 3068 a := v.Args[0] 3069 v_1 := v.Args[1] 3070 if v_1.Op != OpARM64FMULS { 3071 break 3072 } 3073 _ = v_1.Args[1] 3074 x := v_1.Args[0] 3075 y := v_1.Args[1] 3076 v.reset(OpARM64FMADDS) 3077 v.AddArg(a) 3078 v.AddArg(x) 3079 v.AddArg(y) 3080 return true 3081 } 3082 // match: (FADDS (FMULS x y) a) 3083 // cond: 3084 // result: (FMADDS a x y) 3085 for { 3086 _ = v.Args[1] 3087 v_0 := v.Args[0] 3088 if v_0.Op != OpARM64FMULS { 3089 break 3090 } 3091 _ = v_0.Args[1] 3092 x := v_0.Args[0] 3093 y := v_0.Args[1] 3094 a := v.Args[1] 3095 v.reset(OpARM64FMADDS) 3096 v.AddArg(a) 3097 v.AddArg(x) 3098 v.AddArg(y) 3099 return true 3100 } 3101 // match: (FADDS a (FNMULS x y)) 3102 // cond: 3103 // result: (FMSUBS a x y) 3104 for { 3105 _ = v.Args[1] 3106 a := v.Args[0] 3107 v_1 := v.Args[1] 3108 if v_1.Op != OpARM64FNMULS { 3109 break 3110 } 3111 _ = v_1.Args[1] 3112 x := v_1.Args[0] 3113 y := v_1.Args[1] 3114 v.reset(OpARM64FMSUBS) 3115 v.AddArg(a) 3116 v.AddArg(x) 3117 v.AddArg(y) 3118 return true 3119 } 3120 // match: (FADDS (FNMULS x y) a) 3121 // cond: 3122 // result: (FMSUBS a x y) 3123 for { 3124 _ = v.Args[1] 3125 v_0 := v.Args[0] 3126 if v_0.Op != OpARM64FNMULS { 3127 break 3128 } 3129 _ = v_0.Args[1] 3130 x := v_0.Args[0] 
3131 y := v_0.Args[1] 3132 a := v.Args[1] 3133 v.reset(OpARM64FMSUBS) 3134 v.AddArg(a) 3135 v.AddArg(x) 3136 v.AddArg(y) 3137 return true 3138 } 3139 return false 3140 } 3141 func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool { 3142 b := v.Block 3143 _ = b 3144 // match: (FMOVDgpfp <t> (Arg [off] {sym})) 3145 // cond: 3146 // result: @b.Func.Entry (Arg <t> [off] {sym}) 3147 for { 3148 t := v.Type 3149 v_0 := v.Args[0] 3150 if v_0.Op != OpArg { 3151 break 3152 } 3153 off := v_0.AuxInt 3154 sym := v_0.Aux 3155 b = b.Func.Entry 3156 v0 := b.NewValue0(v.Pos, OpArg, t) 3157 v.reset(OpCopy) 3158 v.AddArg(v0) 3159 v0.AuxInt = off 3160 v0.Aux = sym 3161 return true 3162 } 3163 return false 3164 } 3165 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 3166 b := v.Block 3167 _ = b 3168 config := b.Func.Config 3169 _ = config 3170 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 3171 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3172 // result: (FMOVDload [off1+off2] {sym} ptr mem) 3173 for { 3174 off1 := v.AuxInt 3175 sym := v.Aux 3176 _ = v.Args[1] 3177 v_0 := v.Args[0] 3178 if v_0.Op != OpARM64ADDconst { 3179 break 3180 } 3181 off2 := v_0.AuxInt 3182 ptr := v_0.Args[0] 3183 mem := v.Args[1] 3184 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3185 break 3186 } 3187 v.reset(OpARM64FMOVDload) 3188 v.AuxInt = off1 + off2 3189 v.Aux = sym 3190 v.AddArg(ptr) 3191 v.AddArg(mem) 3192 return true 3193 } 3194 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3195 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3196 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3197 for { 3198 off1 := v.AuxInt 3199 sym1 := v.Aux 3200 _ = v.Args[1] 3201 v_0 := v.Args[0] 3202 if v_0.Op != OpARM64MOVDaddr { 3203 break 3204 } 3205 off2 := v_0.AuxInt 3206 sym2 := v_0.Aux 3207 ptr := v_0.Args[0] 3208 mem := v.Args[1] 3209 if 
!(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3210 break 3211 } 3212 v.reset(OpARM64FMOVDload) 3213 v.AuxInt = off1 + off2 3214 v.Aux = mergeSym(sym1, sym2) 3215 v.AddArg(ptr) 3216 v.AddArg(mem) 3217 return true 3218 } 3219 return false 3220 } 3221 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 3222 b := v.Block 3223 _ = b 3224 config := b.Func.Config 3225 _ = config 3226 // match: (FMOVDstore ptr (FMOVDgpfp val) mem) 3227 // cond: 3228 // result: (MOVDstore ptr val mem) 3229 for { 3230 _ = v.Args[2] 3231 ptr := v.Args[0] 3232 v_1 := v.Args[1] 3233 if v_1.Op != OpARM64FMOVDgpfp { 3234 break 3235 } 3236 val := v_1.Args[0] 3237 mem := v.Args[2] 3238 v.reset(OpARM64MOVDstore) 3239 v.AddArg(ptr) 3240 v.AddArg(val) 3241 v.AddArg(mem) 3242 return true 3243 } 3244 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3245 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3246 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 3247 for { 3248 off1 := v.AuxInt 3249 sym := v.Aux 3250 _ = v.Args[2] 3251 v_0 := v.Args[0] 3252 if v_0.Op != OpARM64ADDconst { 3253 break 3254 } 3255 off2 := v_0.AuxInt 3256 ptr := v_0.Args[0] 3257 val := v.Args[1] 3258 mem := v.Args[2] 3259 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3260 break 3261 } 3262 v.reset(OpARM64FMOVDstore) 3263 v.AuxInt = off1 + off2 3264 v.Aux = sym 3265 v.AddArg(ptr) 3266 v.AddArg(val) 3267 v.AddArg(mem) 3268 return true 3269 } 3270 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3271 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3272 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3273 for { 3274 off1 := v.AuxInt 3275 sym1 := v.Aux 3276 _ = v.Args[2] 3277 v_0 := v.Args[0] 3278 if v_0.Op != OpARM64MOVDaddr { 3279 break 3280 } 3281 off2 := v_0.AuxInt 3282 sym2 := v_0.Aux 3283 ptr := 
v_0.Args[0] 3284 val := v.Args[1] 3285 mem := v.Args[2] 3286 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3287 break 3288 } 3289 v.reset(OpARM64FMOVDstore) 3290 v.AuxInt = off1 + off2 3291 v.Aux = mergeSym(sym1, sym2) 3292 v.AddArg(ptr) 3293 v.AddArg(val) 3294 v.AddArg(mem) 3295 return true 3296 } 3297 return false 3298 } 3299 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 3300 b := v.Block 3301 _ = b 3302 config := b.Func.Config 3303 _ = config 3304 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 3305 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3306 // result: (FMOVSload [off1+off2] {sym} ptr mem) 3307 for { 3308 off1 := v.AuxInt 3309 sym := v.Aux 3310 _ = v.Args[1] 3311 v_0 := v.Args[0] 3312 if v_0.Op != OpARM64ADDconst { 3313 break 3314 } 3315 off2 := v_0.AuxInt 3316 ptr := v_0.Args[0] 3317 mem := v.Args[1] 3318 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3319 break 3320 } 3321 v.reset(OpARM64FMOVSload) 3322 v.AuxInt = off1 + off2 3323 v.Aux = sym 3324 v.AddArg(ptr) 3325 v.AddArg(mem) 3326 return true 3327 } 3328 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3329 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3330 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3331 for { 3332 off1 := v.AuxInt 3333 sym1 := v.Aux 3334 _ = v.Args[1] 3335 v_0 := v.Args[0] 3336 if v_0.Op != OpARM64MOVDaddr { 3337 break 3338 } 3339 off2 := v_0.AuxInt 3340 sym2 := v_0.Aux 3341 ptr := v_0.Args[0] 3342 mem := v.Args[1] 3343 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3344 break 3345 } 3346 v.reset(OpARM64FMOVSload) 3347 v.AuxInt = off1 + off2 3348 v.Aux = mergeSym(sym1, sym2) 3349 v.AddArg(ptr) 3350 v.AddArg(mem) 3351 return true 3352 } 3353 return false 3354 } 3355 func 
rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 3356 b := v.Block 3357 _ = b 3358 config := b.Func.Config 3359 _ = config 3360 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3361 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3362 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 3363 for { 3364 off1 := v.AuxInt 3365 sym := v.Aux 3366 _ = v.Args[2] 3367 v_0 := v.Args[0] 3368 if v_0.Op != OpARM64ADDconst { 3369 break 3370 } 3371 off2 := v_0.AuxInt 3372 ptr := v_0.Args[0] 3373 val := v.Args[1] 3374 mem := v.Args[2] 3375 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3376 break 3377 } 3378 v.reset(OpARM64FMOVSstore) 3379 v.AuxInt = off1 + off2 3380 v.Aux = sym 3381 v.AddArg(ptr) 3382 v.AddArg(val) 3383 v.AddArg(mem) 3384 return true 3385 } 3386 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3387 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3388 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3389 for { 3390 off1 := v.AuxInt 3391 sym1 := v.Aux 3392 _ = v.Args[2] 3393 v_0 := v.Args[0] 3394 if v_0.Op != OpARM64MOVDaddr { 3395 break 3396 } 3397 off2 := v_0.AuxInt 3398 sym2 := v_0.Aux 3399 ptr := v_0.Args[0] 3400 val := v.Args[1] 3401 mem := v.Args[2] 3402 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3403 break 3404 } 3405 v.reset(OpARM64FMOVSstore) 3406 v.AuxInt = off1 + off2 3407 v.Aux = mergeSym(sym1, sym2) 3408 v.AddArg(ptr) 3409 v.AddArg(val) 3410 v.AddArg(mem) 3411 return true 3412 } 3413 return false 3414 } 3415 func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool { 3416 // match: (FMULD (FNEGD x) y) 3417 // cond: 3418 // result: (FNMULD x y) 3419 for { 3420 _ = v.Args[1] 3421 v_0 := v.Args[0] 3422 if v_0.Op != OpARM64FNEGD { 3423 break 3424 } 3425 x := v_0.Args[0] 3426 y := v.Args[1] 3427 v.reset(OpARM64FNMULD) 3428 
v.AddArg(x) 3429 v.AddArg(y) 3430 return true 3431 } 3432 // match: (FMULD y (FNEGD x)) 3433 // cond: 3434 // result: (FNMULD x y) 3435 for { 3436 _ = v.Args[1] 3437 y := v.Args[0] 3438 v_1 := v.Args[1] 3439 if v_1.Op != OpARM64FNEGD { 3440 break 3441 } 3442 x := v_1.Args[0] 3443 v.reset(OpARM64FNMULD) 3444 v.AddArg(x) 3445 v.AddArg(y) 3446 return true 3447 } 3448 return false 3449 } 3450 func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool { 3451 // match: (FMULS (FNEGS x) y) 3452 // cond: 3453 // result: (FNMULS x y) 3454 for { 3455 _ = v.Args[1] 3456 v_0 := v.Args[0] 3457 if v_0.Op != OpARM64FNEGS { 3458 break 3459 } 3460 x := v_0.Args[0] 3461 y := v.Args[1] 3462 v.reset(OpARM64FNMULS) 3463 v.AddArg(x) 3464 v.AddArg(y) 3465 return true 3466 } 3467 // match: (FMULS y (FNEGS x)) 3468 // cond: 3469 // result: (FNMULS x y) 3470 for { 3471 _ = v.Args[1] 3472 y := v.Args[0] 3473 v_1 := v.Args[1] 3474 if v_1.Op != OpARM64FNEGS { 3475 break 3476 } 3477 x := v_1.Args[0] 3478 v.reset(OpARM64FNMULS) 3479 v.AddArg(x) 3480 v.AddArg(y) 3481 return true 3482 } 3483 return false 3484 } 3485 func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool { 3486 // match: (FNEGD (FMULD x y)) 3487 // cond: 3488 // result: (FNMULD x y) 3489 for { 3490 v_0 := v.Args[0] 3491 if v_0.Op != OpARM64FMULD { 3492 break 3493 } 3494 _ = v_0.Args[1] 3495 x := v_0.Args[0] 3496 y := v_0.Args[1] 3497 v.reset(OpARM64FNMULD) 3498 v.AddArg(x) 3499 v.AddArg(y) 3500 return true 3501 } 3502 // match: (FNEGD (FNMULD x y)) 3503 // cond: 3504 // result: (FMULD x y) 3505 for { 3506 v_0 := v.Args[0] 3507 if v_0.Op != OpARM64FNMULD { 3508 break 3509 } 3510 _ = v_0.Args[1] 3511 x := v_0.Args[0] 3512 y := v_0.Args[1] 3513 v.reset(OpARM64FMULD) 3514 v.AddArg(x) 3515 v.AddArg(y) 3516 return true 3517 } 3518 return false 3519 } 3520 func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool { 3521 // match: (FNEGS (FMULS x y)) 3522 // cond: 3523 // result: (FNMULS x y) 3524 for { 3525 v_0 := v.Args[0] 3526 if v_0.Op != 
OpARM64FMULS { 3527 break 3528 } 3529 _ = v_0.Args[1] 3530 x := v_0.Args[0] 3531 y := v_0.Args[1] 3532 v.reset(OpARM64FNMULS) 3533 v.AddArg(x) 3534 v.AddArg(y) 3535 return true 3536 } 3537 // match: (FNEGS (FNMULS x y)) 3538 // cond: 3539 // result: (FMULS x y) 3540 for { 3541 v_0 := v.Args[0] 3542 if v_0.Op != OpARM64FNMULS { 3543 break 3544 } 3545 _ = v_0.Args[1] 3546 x := v_0.Args[0] 3547 y := v_0.Args[1] 3548 v.reset(OpARM64FMULS) 3549 v.AddArg(x) 3550 v.AddArg(y) 3551 return true 3552 } 3553 return false 3554 } 3555 func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool { 3556 // match: (FNMULD (FNEGD x) y) 3557 // cond: 3558 // result: (FMULD x y) 3559 for { 3560 _ = v.Args[1] 3561 v_0 := v.Args[0] 3562 if v_0.Op != OpARM64FNEGD { 3563 break 3564 } 3565 x := v_0.Args[0] 3566 y := v.Args[1] 3567 v.reset(OpARM64FMULD) 3568 v.AddArg(x) 3569 v.AddArg(y) 3570 return true 3571 } 3572 // match: (FNMULD y (FNEGD x)) 3573 // cond: 3574 // result: (FMULD x y) 3575 for { 3576 _ = v.Args[1] 3577 y := v.Args[0] 3578 v_1 := v.Args[1] 3579 if v_1.Op != OpARM64FNEGD { 3580 break 3581 } 3582 x := v_1.Args[0] 3583 v.reset(OpARM64FMULD) 3584 v.AddArg(x) 3585 v.AddArg(y) 3586 return true 3587 } 3588 return false 3589 } 3590 func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool { 3591 // match: (FNMULS (FNEGS x) y) 3592 // cond: 3593 // result: (FMULS x y) 3594 for { 3595 _ = v.Args[1] 3596 v_0 := v.Args[0] 3597 if v_0.Op != OpARM64FNEGS { 3598 break 3599 } 3600 x := v_0.Args[0] 3601 y := v.Args[1] 3602 v.reset(OpARM64FMULS) 3603 v.AddArg(x) 3604 v.AddArg(y) 3605 return true 3606 } 3607 // match: (FNMULS y (FNEGS x)) 3608 // cond: 3609 // result: (FMULS x y) 3610 for { 3611 _ = v.Args[1] 3612 y := v.Args[0] 3613 v_1 := v.Args[1] 3614 if v_1.Op != OpARM64FNEGS { 3615 break 3616 } 3617 x := v_1.Args[0] 3618 v.reset(OpARM64FMULS) 3619 v.AddArg(x) 3620 v.AddArg(y) 3621 return true 3622 } 3623 return false 3624 } 3625 func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool { 3626 // 
match: (FSUBD a (FMULD x y)) 3627 // cond: 3628 // result: (FMSUBD a x y) 3629 for { 3630 _ = v.Args[1] 3631 a := v.Args[0] 3632 v_1 := v.Args[1] 3633 if v_1.Op != OpARM64FMULD { 3634 break 3635 } 3636 _ = v_1.Args[1] 3637 x := v_1.Args[0] 3638 y := v_1.Args[1] 3639 v.reset(OpARM64FMSUBD) 3640 v.AddArg(a) 3641 v.AddArg(x) 3642 v.AddArg(y) 3643 return true 3644 } 3645 // match: (FSUBD (FMULD x y) a) 3646 // cond: 3647 // result: (FNMSUBD a x y) 3648 for { 3649 _ = v.Args[1] 3650 v_0 := v.Args[0] 3651 if v_0.Op != OpARM64FMULD { 3652 break 3653 } 3654 _ = v_0.Args[1] 3655 x := v_0.Args[0] 3656 y := v_0.Args[1] 3657 a := v.Args[1] 3658 v.reset(OpARM64FNMSUBD) 3659 v.AddArg(a) 3660 v.AddArg(x) 3661 v.AddArg(y) 3662 return true 3663 } 3664 // match: (FSUBD a (FNMULD x y)) 3665 // cond: 3666 // result: (FMADDD a x y) 3667 for { 3668 _ = v.Args[1] 3669 a := v.Args[0] 3670 v_1 := v.Args[1] 3671 if v_1.Op != OpARM64FNMULD { 3672 break 3673 } 3674 _ = v_1.Args[1] 3675 x := v_1.Args[0] 3676 y := v_1.Args[1] 3677 v.reset(OpARM64FMADDD) 3678 v.AddArg(a) 3679 v.AddArg(x) 3680 v.AddArg(y) 3681 return true 3682 } 3683 // match: (FSUBD (FNMULD x y) a) 3684 // cond: 3685 // result: (FNMADDD a x y) 3686 for { 3687 _ = v.Args[1] 3688 v_0 := v.Args[0] 3689 if v_0.Op != OpARM64FNMULD { 3690 break 3691 } 3692 _ = v_0.Args[1] 3693 x := v_0.Args[0] 3694 y := v_0.Args[1] 3695 a := v.Args[1] 3696 v.reset(OpARM64FNMADDD) 3697 v.AddArg(a) 3698 v.AddArg(x) 3699 v.AddArg(y) 3700 return true 3701 } 3702 return false 3703 } 3704 func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool { 3705 // match: (FSUBS a (FMULS x y)) 3706 // cond: 3707 // result: (FMSUBS a x y) 3708 for { 3709 _ = v.Args[1] 3710 a := v.Args[0] 3711 v_1 := v.Args[1] 3712 if v_1.Op != OpARM64FMULS { 3713 break 3714 } 3715 _ = v_1.Args[1] 3716 x := v_1.Args[0] 3717 y := v_1.Args[1] 3718 v.reset(OpARM64FMSUBS) 3719 v.AddArg(a) 3720 v.AddArg(x) 3721 v.AddArg(y) 3722 return true 3723 } 3724 // match: (FSUBS (FMULS x y) a) 3725 // 
cond: 3726 // result: (FNMSUBS a x y) 3727 for { 3728 _ = v.Args[1] 3729 v_0 := v.Args[0] 3730 if v_0.Op != OpARM64FMULS { 3731 break 3732 } 3733 _ = v_0.Args[1] 3734 x := v_0.Args[0] 3735 y := v_0.Args[1] 3736 a := v.Args[1] 3737 v.reset(OpARM64FNMSUBS) 3738 v.AddArg(a) 3739 v.AddArg(x) 3740 v.AddArg(y) 3741 return true 3742 } 3743 // match: (FSUBS a (FNMULS x y)) 3744 // cond: 3745 // result: (FMADDS a x y) 3746 for { 3747 _ = v.Args[1] 3748 a := v.Args[0] 3749 v_1 := v.Args[1] 3750 if v_1.Op != OpARM64FNMULS { 3751 break 3752 } 3753 _ = v_1.Args[1] 3754 x := v_1.Args[0] 3755 y := v_1.Args[1] 3756 v.reset(OpARM64FMADDS) 3757 v.AddArg(a) 3758 v.AddArg(x) 3759 v.AddArg(y) 3760 return true 3761 } 3762 // match: (FSUBS (FNMULS x y) a) 3763 // cond: 3764 // result: (FNMADDS a x y) 3765 for { 3766 _ = v.Args[1] 3767 v_0 := v.Args[0] 3768 if v_0.Op != OpARM64FNMULS { 3769 break 3770 } 3771 _ = v_0.Args[1] 3772 x := v_0.Args[0] 3773 y := v_0.Args[1] 3774 a := v.Args[1] 3775 v.reset(OpARM64FNMADDS) 3776 v.AddArg(a) 3777 v.AddArg(x) 3778 v.AddArg(y) 3779 return true 3780 } 3781 return false 3782 } 3783 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 3784 // match: (GreaterEqual (FlagEQ)) 3785 // cond: 3786 // result: (MOVDconst [1]) 3787 for { 3788 v_0 := v.Args[0] 3789 if v_0.Op != OpARM64FlagEQ { 3790 break 3791 } 3792 v.reset(OpARM64MOVDconst) 3793 v.AuxInt = 1 3794 return true 3795 } 3796 // match: (GreaterEqual (FlagLT_ULT)) 3797 // cond: 3798 // result: (MOVDconst [0]) 3799 for { 3800 v_0 := v.Args[0] 3801 if v_0.Op != OpARM64FlagLT_ULT { 3802 break 3803 } 3804 v.reset(OpARM64MOVDconst) 3805 v.AuxInt = 0 3806 return true 3807 } 3808 // match: (GreaterEqual (FlagLT_UGT)) 3809 // cond: 3810 // result: (MOVDconst [0]) 3811 for { 3812 v_0 := v.Args[0] 3813 if v_0.Op != OpARM64FlagLT_UGT { 3814 break 3815 } 3816 v.reset(OpARM64MOVDconst) 3817 v.AuxInt = 0 3818 return true 3819 } 3820 // match: (GreaterEqual (FlagGT_ULT)) 3821 // cond: 3822 // result: 
(MOVDconst [1]) 3823 for { 3824 v_0 := v.Args[0] 3825 if v_0.Op != OpARM64FlagGT_ULT { 3826 break 3827 } 3828 v.reset(OpARM64MOVDconst) 3829 v.AuxInt = 1 3830 return true 3831 } 3832 // match: (GreaterEqual (FlagGT_UGT)) 3833 // cond: 3834 // result: (MOVDconst [1]) 3835 for { 3836 v_0 := v.Args[0] 3837 if v_0.Op != OpARM64FlagGT_UGT { 3838 break 3839 } 3840 v.reset(OpARM64MOVDconst) 3841 v.AuxInt = 1 3842 return true 3843 } 3844 // match: (GreaterEqual (InvertFlags x)) 3845 // cond: 3846 // result: (LessEqual x) 3847 for { 3848 v_0 := v.Args[0] 3849 if v_0.Op != OpARM64InvertFlags { 3850 break 3851 } 3852 x := v_0.Args[0] 3853 v.reset(OpARM64LessEqual) 3854 v.AddArg(x) 3855 return true 3856 } 3857 return false 3858 } 3859 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 3860 // match: (GreaterEqualU (FlagEQ)) 3861 // cond: 3862 // result: (MOVDconst [1]) 3863 for { 3864 v_0 := v.Args[0] 3865 if v_0.Op != OpARM64FlagEQ { 3866 break 3867 } 3868 v.reset(OpARM64MOVDconst) 3869 v.AuxInt = 1 3870 return true 3871 } 3872 // match: (GreaterEqualU (FlagLT_ULT)) 3873 // cond: 3874 // result: (MOVDconst [0]) 3875 for { 3876 v_0 := v.Args[0] 3877 if v_0.Op != OpARM64FlagLT_ULT { 3878 break 3879 } 3880 v.reset(OpARM64MOVDconst) 3881 v.AuxInt = 0 3882 return true 3883 } 3884 // match: (GreaterEqualU (FlagLT_UGT)) 3885 // cond: 3886 // result: (MOVDconst [1]) 3887 for { 3888 v_0 := v.Args[0] 3889 if v_0.Op != OpARM64FlagLT_UGT { 3890 break 3891 } 3892 v.reset(OpARM64MOVDconst) 3893 v.AuxInt = 1 3894 return true 3895 } 3896 // match: (GreaterEqualU (FlagGT_ULT)) 3897 // cond: 3898 // result: (MOVDconst [0]) 3899 for { 3900 v_0 := v.Args[0] 3901 if v_0.Op != OpARM64FlagGT_ULT { 3902 break 3903 } 3904 v.reset(OpARM64MOVDconst) 3905 v.AuxInt = 0 3906 return true 3907 } 3908 // match: (GreaterEqualU (FlagGT_UGT)) 3909 // cond: 3910 // result: (MOVDconst [1]) 3911 for { 3912 v_0 := v.Args[0] 3913 if v_0.Op != OpARM64FlagGT_UGT { 3914 break 3915 } 3916 
v.reset(OpARM64MOVDconst) 3917 v.AuxInt = 1 3918 return true 3919 } 3920 // match: (GreaterEqualU (InvertFlags x)) 3921 // cond: 3922 // result: (LessEqualU x) 3923 for { 3924 v_0 := v.Args[0] 3925 if v_0.Op != OpARM64InvertFlags { 3926 break 3927 } 3928 x := v_0.Args[0] 3929 v.reset(OpARM64LessEqualU) 3930 v.AddArg(x) 3931 return true 3932 } 3933 return false 3934 } 3935 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 3936 // match: (GreaterThan (FlagEQ)) 3937 // cond: 3938 // result: (MOVDconst [0]) 3939 for { 3940 v_0 := v.Args[0] 3941 if v_0.Op != OpARM64FlagEQ { 3942 break 3943 } 3944 v.reset(OpARM64MOVDconst) 3945 v.AuxInt = 0 3946 return true 3947 } 3948 // match: (GreaterThan (FlagLT_ULT)) 3949 // cond: 3950 // result: (MOVDconst [0]) 3951 for { 3952 v_0 := v.Args[0] 3953 if v_0.Op != OpARM64FlagLT_ULT { 3954 break 3955 } 3956 v.reset(OpARM64MOVDconst) 3957 v.AuxInt = 0 3958 return true 3959 } 3960 // match: (GreaterThan (FlagLT_UGT)) 3961 // cond: 3962 // result: (MOVDconst [0]) 3963 for { 3964 v_0 := v.Args[0] 3965 if v_0.Op != OpARM64FlagLT_UGT { 3966 break 3967 } 3968 v.reset(OpARM64MOVDconst) 3969 v.AuxInt = 0 3970 return true 3971 } 3972 // match: (GreaterThan (FlagGT_ULT)) 3973 // cond: 3974 // result: (MOVDconst [1]) 3975 for { 3976 v_0 := v.Args[0] 3977 if v_0.Op != OpARM64FlagGT_ULT { 3978 break 3979 } 3980 v.reset(OpARM64MOVDconst) 3981 v.AuxInt = 1 3982 return true 3983 } 3984 // match: (GreaterThan (FlagGT_UGT)) 3985 // cond: 3986 // result: (MOVDconst [1]) 3987 for { 3988 v_0 := v.Args[0] 3989 if v_0.Op != OpARM64FlagGT_UGT { 3990 break 3991 } 3992 v.reset(OpARM64MOVDconst) 3993 v.AuxInt = 1 3994 return true 3995 } 3996 // match: (GreaterThan (InvertFlags x)) 3997 // cond: 3998 // result: (LessThan x) 3999 for { 4000 v_0 := v.Args[0] 4001 if v_0.Op != OpARM64InvertFlags { 4002 break 4003 } 4004 x := v_0.Args[0] 4005 v.reset(OpARM64LessThan) 4006 v.AddArg(x) 4007 return true 4008 } 4009 return false 4010 } 4011 func 
rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 4012 // match: (GreaterThanU (FlagEQ)) 4013 // cond: 4014 // result: (MOVDconst [0]) 4015 for { 4016 v_0 := v.Args[0] 4017 if v_0.Op != OpARM64FlagEQ { 4018 break 4019 } 4020 v.reset(OpARM64MOVDconst) 4021 v.AuxInt = 0 4022 return true 4023 } 4024 // match: (GreaterThanU (FlagLT_ULT)) 4025 // cond: 4026 // result: (MOVDconst [0]) 4027 for { 4028 v_0 := v.Args[0] 4029 if v_0.Op != OpARM64FlagLT_ULT { 4030 break 4031 } 4032 v.reset(OpARM64MOVDconst) 4033 v.AuxInt = 0 4034 return true 4035 } 4036 // match: (GreaterThanU (FlagLT_UGT)) 4037 // cond: 4038 // result: (MOVDconst [1]) 4039 for { 4040 v_0 := v.Args[0] 4041 if v_0.Op != OpARM64FlagLT_UGT { 4042 break 4043 } 4044 v.reset(OpARM64MOVDconst) 4045 v.AuxInt = 1 4046 return true 4047 } 4048 // match: (GreaterThanU (FlagGT_ULT)) 4049 // cond: 4050 // result: (MOVDconst [0]) 4051 for { 4052 v_0 := v.Args[0] 4053 if v_0.Op != OpARM64FlagGT_ULT { 4054 break 4055 } 4056 v.reset(OpARM64MOVDconst) 4057 v.AuxInt = 0 4058 return true 4059 } 4060 // match: (GreaterThanU (FlagGT_UGT)) 4061 // cond: 4062 // result: (MOVDconst [1]) 4063 for { 4064 v_0 := v.Args[0] 4065 if v_0.Op != OpARM64FlagGT_UGT { 4066 break 4067 } 4068 v.reset(OpARM64MOVDconst) 4069 v.AuxInt = 1 4070 return true 4071 } 4072 // match: (GreaterThanU (InvertFlags x)) 4073 // cond: 4074 // result: (LessThanU x) 4075 for { 4076 v_0 := v.Args[0] 4077 if v_0.Op != OpARM64InvertFlags { 4078 break 4079 } 4080 x := v_0.Args[0] 4081 v.reset(OpARM64LessThanU) 4082 v.AddArg(x) 4083 return true 4084 } 4085 return false 4086 } 4087 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 4088 // match: (LessEqual (FlagEQ)) 4089 // cond: 4090 // result: (MOVDconst [1]) 4091 for { 4092 v_0 := v.Args[0] 4093 if v_0.Op != OpARM64FlagEQ { 4094 break 4095 } 4096 v.reset(OpARM64MOVDconst) 4097 v.AuxInt = 1 4098 return true 4099 } 4100 // match: (LessEqual (FlagLT_ULT)) 4101 // cond: 4102 // result: (MOVDconst [1]) 4103 
// NOTE(review): this chunk was recovered from a line-numbered dump; the
// interleaved original line numbers have been stripped and gofmt-style
// formatting restored. No tokens were added, removed, or reordered.
// This file is generated from gen/ARM64.rules — any semantic change must
// be made there and the file regenerated, never edited by hand.
//
// NOTE(review): the chunk opens mid-way through
// rewriteValueARM64_OpARM64LessEqual_0; its first rules precede this view.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (InvertFlags x))
	// cond:
	// result: (GreaterEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool {
	// match: (LessEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// cond:
	// result: (GreaterEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool {
	// match: (LessThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (InvertFlags x))
	// cond:
	// result: (GreaterThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool {
	// match: (LessThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// cond:
	// result: (GreaterThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [-1]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG _ (MOVDconst [0]))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG (MOVDconst [0]) _)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG x (MOVDconst [1]))
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [1]) x)
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (NEG (SLLconst <x.Type> [log2(c/3)] (ADDshiftLL <x.Type> x x [1])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 3)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 1
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (NEG (SLLconst <x.Type> [log2(c/3)] (ADDshiftLL <x.Type> x x [1])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 3)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 1
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (NEG (SLLconst <x.Type> [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 7)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (NEG (SLLconst <x.Type> [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 7)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool {
	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [-c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c * d
		return true
	}
	// match: (MNEG (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c * d
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/3)] (ADDshiftLL <x.Type> x x [1])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 3)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 1
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/3)] (ADDshiftLL <x.Type> x x [1])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 3)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 1
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 7)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 7)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool {
	// match: (MNEGW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [-int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(int32(c) * int32(d))
		return true
	}
	// match: (MNEGW (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(int32(c) * int32(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(c)%int64(d)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(int32(c)%int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) % int32(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	// match: (MOVBUreg x)
	// cond: x.Type.IsBoolean()
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if !(x.Type.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBreg {
			break
// NOTE(review): chunk is truncated here; rewriteValueARM64_OpARM64MOVBstore_0
// continues in the next chunk of the file.
5767 } 5768 x := v_1.Args[0] 5769 mem := v.Args[2] 5770 v.reset(OpARM64MOVBstore) 5771 v.AuxInt = off 5772 v.Aux = sym 5773 v.AddArg(ptr) 5774 v.AddArg(x) 5775 v.AddArg(mem) 5776 return true 5777 } 5778 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 5779 // cond: 5780 // result: (MOVBstore [off] {sym} ptr x mem) 5781 for { 5782 off := v.AuxInt 5783 sym := v.Aux 5784 _ = v.Args[2] 5785 ptr := v.Args[0] 5786 v_1 := v.Args[1] 5787 if v_1.Op != OpARM64MOVBUreg { 5788 break 5789 } 5790 x := v_1.Args[0] 5791 mem := v.Args[2] 5792 v.reset(OpARM64MOVBstore) 5793 v.AuxInt = off 5794 v.Aux = sym 5795 v.AddArg(ptr) 5796 v.AddArg(x) 5797 v.AddArg(mem) 5798 return true 5799 } 5800 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 5801 // cond: 5802 // result: (MOVBstore [off] {sym} ptr x mem) 5803 for { 5804 off := v.AuxInt 5805 sym := v.Aux 5806 _ = v.Args[2] 5807 ptr := v.Args[0] 5808 v_1 := v.Args[1] 5809 if v_1.Op != OpARM64MOVHreg { 5810 break 5811 } 5812 x := v_1.Args[0] 5813 mem := v.Args[2] 5814 v.reset(OpARM64MOVBstore) 5815 v.AuxInt = off 5816 v.Aux = sym 5817 v.AddArg(ptr) 5818 v.AddArg(x) 5819 v.AddArg(mem) 5820 return true 5821 } 5822 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 5823 // cond: 5824 // result: (MOVBstore [off] {sym} ptr x mem) 5825 for { 5826 off := v.AuxInt 5827 sym := v.Aux 5828 _ = v.Args[2] 5829 ptr := v.Args[0] 5830 v_1 := v.Args[1] 5831 if v_1.Op != OpARM64MOVHUreg { 5832 break 5833 } 5834 x := v_1.Args[0] 5835 mem := v.Args[2] 5836 v.reset(OpARM64MOVBstore) 5837 v.AuxInt = off 5838 v.Aux = sym 5839 v.AddArg(ptr) 5840 v.AddArg(x) 5841 v.AddArg(mem) 5842 return true 5843 } 5844 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 5845 // cond: 5846 // result: (MOVBstore [off] {sym} ptr x mem) 5847 for { 5848 off := v.AuxInt 5849 sym := v.Aux 5850 _ = v.Args[2] 5851 ptr := v.Args[0] 5852 v_1 := v.Args[1] 5853 if v_1.Op != OpARM64MOVWreg { 5854 break 5855 } 5856 x := v_1.Args[0] 5857 mem := v.Args[2] 5858 
v.reset(OpARM64MOVBstore) 5859 v.AuxInt = off 5860 v.Aux = sym 5861 v.AddArg(ptr) 5862 v.AddArg(x) 5863 v.AddArg(mem) 5864 return true 5865 } 5866 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 5867 // cond: 5868 // result: (MOVBstore [off] {sym} ptr x mem) 5869 for { 5870 off := v.AuxInt 5871 sym := v.Aux 5872 _ = v.Args[2] 5873 ptr := v.Args[0] 5874 v_1 := v.Args[1] 5875 if v_1.Op != OpARM64MOVWUreg { 5876 break 5877 } 5878 x := v_1.Args[0] 5879 mem := v.Args[2] 5880 v.reset(OpARM64MOVBstore) 5881 v.AuxInt = off 5882 v.Aux = sym 5883 v.AddArg(ptr) 5884 v.AddArg(x) 5885 v.AddArg(mem) 5886 return true 5887 } 5888 return false 5889 } 5890 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 5891 b := v.Block 5892 _ = b 5893 config := b.Func.Config 5894 _ = config 5895 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 5896 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5897 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 5898 for { 5899 off1 := v.AuxInt 5900 sym := v.Aux 5901 _ = v.Args[1] 5902 v_0 := v.Args[0] 5903 if v_0.Op != OpARM64ADDconst { 5904 break 5905 } 5906 off2 := v_0.AuxInt 5907 ptr := v_0.Args[0] 5908 mem := v.Args[1] 5909 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5910 break 5911 } 5912 v.reset(OpARM64MOVBstorezero) 5913 v.AuxInt = off1 + off2 5914 v.Aux = sym 5915 v.AddArg(ptr) 5916 v.AddArg(mem) 5917 return true 5918 } 5919 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5920 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5921 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5922 for { 5923 off1 := v.AuxInt 5924 sym1 := v.Aux 5925 _ = v.Args[1] 5926 v_0 := v.Args[0] 5927 if v_0.Op != OpARM64MOVDaddr { 5928 break 5929 } 5930 off2 := v_0.AuxInt 5931 sym2 := v_0.Aux 5932 ptr := v_0.Args[0] 5933 mem := v.Args[1] 5934 if !(canMergeSym(sym1, sym2) && 
is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5935 break 5936 } 5937 v.reset(OpARM64MOVBstorezero) 5938 v.AuxInt = off1 + off2 5939 v.Aux = mergeSym(sym1, sym2) 5940 v.AddArg(ptr) 5941 v.AddArg(mem) 5942 return true 5943 } 5944 return false 5945 } 5946 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool { 5947 b := v.Block 5948 _ = b 5949 config := b.Func.Config 5950 _ = config 5951 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 5952 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5953 // result: (MOVDload [off1+off2] {sym} ptr mem) 5954 for { 5955 off1 := v.AuxInt 5956 sym := v.Aux 5957 _ = v.Args[1] 5958 v_0 := v.Args[0] 5959 if v_0.Op != OpARM64ADDconst { 5960 break 5961 } 5962 off2 := v_0.AuxInt 5963 ptr := v_0.Args[0] 5964 mem := v.Args[1] 5965 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5966 break 5967 } 5968 v.reset(OpARM64MOVDload) 5969 v.AuxInt = off1 + off2 5970 v.Aux = sym 5971 v.AddArg(ptr) 5972 v.AddArg(mem) 5973 return true 5974 } 5975 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5976 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5977 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5978 for { 5979 off1 := v.AuxInt 5980 sym1 := v.Aux 5981 _ = v.Args[1] 5982 v_0 := v.Args[0] 5983 if v_0.Op != OpARM64MOVDaddr { 5984 break 5985 } 5986 off2 := v_0.AuxInt 5987 sym2 := v_0.Aux 5988 ptr := v_0.Args[0] 5989 mem := v.Args[1] 5990 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5991 break 5992 } 5993 v.reset(OpARM64MOVDload) 5994 v.AuxInt = off1 + off2 5995 v.Aux = mergeSym(sym1, sym2) 5996 v.AddArg(ptr) 5997 v.AddArg(mem) 5998 return true 5999 } 6000 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 6001 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 6002 // result: (MOVDconst 
[0]) 6003 for { 6004 off := v.AuxInt 6005 sym := v.Aux 6006 _ = v.Args[1] 6007 ptr := v.Args[0] 6008 v_1 := v.Args[1] 6009 if v_1.Op != OpARM64MOVDstorezero { 6010 break 6011 } 6012 off2 := v_1.AuxInt 6013 sym2 := v_1.Aux 6014 _ = v_1.Args[1] 6015 ptr2 := v_1.Args[0] 6016 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 6017 break 6018 } 6019 v.reset(OpARM64MOVDconst) 6020 v.AuxInt = 0 6021 return true 6022 } 6023 return false 6024 } 6025 func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool { 6026 // match: (MOVDreg x) 6027 // cond: x.Uses == 1 6028 // result: (MOVDnop x) 6029 for { 6030 x := v.Args[0] 6031 if !(x.Uses == 1) { 6032 break 6033 } 6034 v.reset(OpARM64MOVDnop) 6035 v.AddArg(x) 6036 return true 6037 } 6038 // match: (MOVDreg (MOVDconst [c])) 6039 // cond: 6040 // result: (MOVDconst [c]) 6041 for { 6042 v_0 := v.Args[0] 6043 if v_0.Op != OpARM64MOVDconst { 6044 break 6045 } 6046 c := v_0.AuxInt 6047 v.reset(OpARM64MOVDconst) 6048 v.AuxInt = c 6049 return true 6050 } 6051 return false 6052 } 6053 func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool { 6054 b := v.Block 6055 _ = b 6056 config := b.Func.Config 6057 _ = config 6058 // match: (MOVDstore ptr (FMOVDfpgp val) mem) 6059 // cond: 6060 // result: (FMOVDstore ptr val mem) 6061 for { 6062 _ = v.Args[2] 6063 ptr := v.Args[0] 6064 v_1 := v.Args[1] 6065 if v_1.Op != OpARM64FMOVDfpgp { 6066 break 6067 } 6068 val := v_1.Args[0] 6069 mem := v.Args[2] 6070 v.reset(OpARM64FMOVDstore) 6071 v.AddArg(ptr) 6072 v.AddArg(val) 6073 v.AddArg(mem) 6074 return true 6075 } 6076 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 6077 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6078 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 6079 for { 6080 off1 := v.AuxInt 6081 sym := v.Aux 6082 _ = v.Args[2] 6083 v_0 := v.Args[0] 6084 if v_0.Op != OpARM64ADDconst { 6085 break 6086 } 6087 off2 := v_0.AuxInt 6088 ptr := v_0.Args[0] 6089 val := v.Args[1] 6090 mem := 
v.Args[2] 6091 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6092 break 6093 } 6094 v.reset(OpARM64MOVDstore) 6095 v.AuxInt = off1 + off2 6096 v.Aux = sym 6097 v.AddArg(ptr) 6098 v.AddArg(val) 6099 v.AddArg(mem) 6100 return true 6101 } 6102 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 6103 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6104 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 6105 for { 6106 off1 := v.AuxInt 6107 sym1 := v.Aux 6108 _ = v.Args[2] 6109 v_0 := v.Args[0] 6110 if v_0.Op != OpARM64MOVDaddr { 6111 break 6112 } 6113 off2 := v_0.AuxInt 6114 sym2 := v_0.Aux 6115 ptr := v_0.Args[0] 6116 val := v.Args[1] 6117 mem := v.Args[2] 6118 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6119 break 6120 } 6121 v.reset(OpARM64MOVDstore) 6122 v.AuxInt = off1 + off2 6123 v.Aux = mergeSym(sym1, sym2) 6124 v.AddArg(ptr) 6125 v.AddArg(val) 6126 v.AddArg(mem) 6127 return true 6128 } 6129 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 6130 // cond: 6131 // result: (MOVDstorezero [off] {sym} ptr mem) 6132 for { 6133 off := v.AuxInt 6134 sym := v.Aux 6135 _ = v.Args[2] 6136 ptr := v.Args[0] 6137 v_1 := v.Args[1] 6138 if v_1.Op != OpARM64MOVDconst { 6139 break 6140 } 6141 if v_1.AuxInt != 0 { 6142 break 6143 } 6144 mem := v.Args[2] 6145 v.reset(OpARM64MOVDstorezero) 6146 v.AuxInt = off 6147 v.Aux = sym 6148 v.AddArg(ptr) 6149 v.AddArg(mem) 6150 return true 6151 } 6152 return false 6153 } 6154 func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool { 6155 b := v.Block 6156 _ = b 6157 config := b.Func.Config 6158 _ = config 6159 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 6160 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6161 // result: (MOVDstorezero [off1+off2] {sym} ptr mem) 6162 for { 6163 off1 := v.AuxInt 6164 sym 
:= v.Aux 6165 _ = v.Args[1] 6166 v_0 := v.Args[0] 6167 if v_0.Op != OpARM64ADDconst { 6168 break 6169 } 6170 off2 := v_0.AuxInt 6171 ptr := v_0.Args[0] 6172 mem := v.Args[1] 6173 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6174 break 6175 } 6176 v.reset(OpARM64MOVDstorezero) 6177 v.AuxInt = off1 + off2 6178 v.Aux = sym 6179 v.AddArg(ptr) 6180 v.AddArg(mem) 6181 return true 6182 } 6183 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6184 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6185 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6186 for { 6187 off1 := v.AuxInt 6188 sym1 := v.Aux 6189 _ = v.Args[1] 6190 v_0 := v.Args[0] 6191 if v_0.Op != OpARM64MOVDaddr { 6192 break 6193 } 6194 off2 := v_0.AuxInt 6195 sym2 := v_0.Aux 6196 ptr := v_0.Args[0] 6197 mem := v.Args[1] 6198 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6199 break 6200 } 6201 v.reset(OpARM64MOVDstorezero) 6202 v.AuxInt = off1 + off2 6203 v.Aux = mergeSym(sym1, sym2) 6204 v.AddArg(ptr) 6205 v.AddArg(mem) 6206 return true 6207 } 6208 return false 6209 } 6210 func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool { 6211 b := v.Block 6212 _ = b 6213 config := b.Func.Config 6214 _ = config 6215 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 6216 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6217 // result: (MOVHUload [off1+off2] {sym} ptr mem) 6218 for { 6219 off1 := v.AuxInt 6220 sym := v.Aux 6221 _ = v.Args[1] 6222 v_0 := v.Args[0] 6223 if v_0.Op != OpARM64ADDconst { 6224 break 6225 } 6226 off2 := v_0.AuxInt 6227 ptr := v_0.Args[0] 6228 mem := v.Args[1] 6229 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6230 break 6231 } 6232 v.reset(OpARM64MOVHUload) 6233 v.AuxInt = off1 + off2 6234 v.Aux = sym 6235 v.AddArg(ptr) 6236 v.AddArg(mem) 6237 return 
true 6238 } 6239 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6240 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6241 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6242 for { 6243 off1 := v.AuxInt 6244 sym1 := v.Aux 6245 _ = v.Args[1] 6246 v_0 := v.Args[0] 6247 if v_0.Op != OpARM64MOVDaddr { 6248 break 6249 } 6250 off2 := v_0.AuxInt 6251 sym2 := v_0.Aux 6252 ptr := v_0.Args[0] 6253 mem := v.Args[1] 6254 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6255 break 6256 } 6257 v.reset(OpARM64MOVHUload) 6258 v.AuxInt = off1 + off2 6259 v.Aux = mergeSym(sym1, sym2) 6260 v.AddArg(ptr) 6261 v.AddArg(mem) 6262 return true 6263 } 6264 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 6265 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 6266 // result: (MOVDconst [0]) 6267 for { 6268 off := v.AuxInt 6269 sym := v.Aux 6270 _ = v.Args[1] 6271 ptr := v.Args[0] 6272 v_1 := v.Args[1] 6273 if v_1.Op != OpARM64MOVHstorezero { 6274 break 6275 } 6276 off2 := v_1.AuxInt 6277 sym2 := v_1.Aux 6278 _ = v_1.Args[1] 6279 ptr2 := v_1.Args[0] 6280 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 6281 break 6282 } 6283 v.reset(OpARM64MOVDconst) 6284 v.AuxInt = 0 6285 return true 6286 } 6287 return false 6288 } 6289 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool { 6290 // match: (MOVHUreg x:(MOVBUload _ _)) 6291 // cond: 6292 // result: (MOVDreg x) 6293 for { 6294 x := v.Args[0] 6295 if x.Op != OpARM64MOVBUload { 6296 break 6297 } 6298 _ = x.Args[1] 6299 v.reset(OpARM64MOVDreg) 6300 v.AddArg(x) 6301 return true 6302 } 6303 // match: (MOVHUreg x:(MOVHUload _ _)) 6304 // cond: 6305 // result: (MOVDreg x) 6306 for { 6307 x := v.Args[0] 6308 if x.Op != OpARM64MOVHUload { 6309 break 6310 } 6311 _ = x.Args[1] 6312 v.reset(OpARM64MOVDreg) 6313 v.AddArg(x) 6314 return true 6315 } 6316 // match: 
(MOVHUreg x:(MOVBUreg _)) 6317 // cond: 6318 // result: (MOVDreg x) 6319 for { 6320 x := v.Args[0] 6321 if x.Op != OpARM64MOVBUreg { 6322 break 6323 } 6324 v.reset(OpARM64MOVDreg) 6325 v.AddArg(x) 6326 return true 6327 } 6328 // match: (MOVHUreg x:(MOVHUreg _)) 6329 // cond: 6330 // result: (MOVDreg x) 6331 for { 6332 x := v.Args[0] 6333 if x.Op != OpARM64MOVHUreg { 6334 break 6335 } 6336 v.reset(OpARM64MOVDreg) 6337 v.AddArg(x) 6338 return true 6339 } 6340 // match: (MOVHUreg (MOVDconst [c])) 6341 // cond: 6342 // result: (MOVDconst [int64(uint16(c))]) 6343 for { 6344 v_0 := v.Args[0] 6345 if v_0.Op != OpARM64MOVDconst { 6346 break 6347 } 6348 c := v_0.AuxInt 6349 v.reset(OpARM64MOVDconst) 6350 v.AuxInt = int64(uint16(c)) 6351 return true 6352 } 6353 return false 6354 } 6355 func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool { 6356 b := v.Block 6357 _ = b 6358 config := b.Func.Config 6359 _ = config 6360 // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem) 6361 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6362 // result: (MOVHload [off1+off2] {sym} ptr mem) 6363 for { 6364 off1 := v.AuxInt 6365 sym := v.Aux 6366 _ = v.Args[1] 6367 v_0 := v.Args[0] 6368 if v_0.Op != OpARM64ADDconst { 6369 break 6370 } 6371 off2 := v_0.AuxInt 6372 ptr := v_0.Args[0] 6373 mem := v.Args[1] 6374 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6375 break 6376 } 6377 v.reset(OpARM64MOVHload) 6378 v.AuxInt = off1 + off2 6379 v.Aux = sym 6380 v.AddArg(ptr) 6381 v.AddArg(mem) 6382 return true 6383 } 6384 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6385 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6386 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6387 for { 6388 off1 := v.AuxInt 6389 sym1 := v.Aux 6390 _ = v.Args[1] 6391 v_0 := v.Args[0] 6392 if v_0.Op != OpARM64MOVDaddr { 6393 break 6394 } 6395 off2 := v_0.AuxInt 6396 sym2 
:= v_0.Aux 6397 ptr := v_0.Args[0] 6398 mem := v.Args[1] 6399 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6400 break 6401 } 6402 v.reset(OpARM64MOVHload) 6403 v.AuxInt = off1 + off2 6404 v.Aux = mergeSym(sym1, sym2) 6405 v.AddArg(ptr) 6406 v.AddArg(mem) 6407 return true 6408 } 6409 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 6410 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 6411 // result: (MOVDconst [0]) 6412 for { 6413 off := v.AuxInt 6414 sym := v.Aux 6415 _ = v.Args[1] 6416 ptr := v.Args[0] 6417 v_1 := v.Args[1] 6418 if v_1.Op != OpARM64MOVHstorezero { 6419 break 6420 } 6421 off2 := v_1.AuxInt 6422 sym2 := v_1.Aux 6423 _ = v_1.Args[1] 6424 ptr2 := v_1.Args[0] 6425 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 6426 break 6427 } 6428 v.reset(OpARM64MOVDconst) 6429 v.AuxInt = 0 6430 return true 6431 } 6432 return false 6433 } 6434 func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool { 6435 // match: (MOVHreg x:(MOVBload _ _)) 6436 // cond: 6437 // result: (MOVDreg x) 6438 for { 6439 x := v.Args[0] 6440 if x.Op != OpARM64MOVBload { 6441 break 6442 } 6443 _ = x.Args[1] 6444 v.reset(OpARM64MOVDreg) 6445 v.AddArg(x) 6446 return true 6447 } 6448 // match: (MOVHreg x:(MOVBUload _ _)) 6449 // cond: 6450 // result: (MOVDreg x) 6451 for { 6452 x := v.Args[0] 6453 if x.Op != OpARM64MOVBUload { 6454 break 6455 } 6456 _ = x.Args[1] 6457 v.reset(OpARM64MOVDreg) 6458 v.AddArg(x) 6459 return true 6460 } 6461 // match: (MOVHreg x:(MOVHload _ _)) 6462 // cond: 6463 // result: (MOVDreg x) 6464 for { 6465 x := v.Args[0] 6466 if x.Op != OpARM64MOVHload { 6467 break 6468 } 6469 _ = x.Args[1] 6470 v.reset(OpARM64MOVDreg) 6471 v.AddArg(x) 6472 return true 6473 } 6474 // match: (MOVHreg x:(MOVBreg _)) 6475 // cond: 6476 // result: (MOVDreg x) 6477 for { 6478 x := v.Args[0] 6479 if x.Op != OpARM64MOVBreg { 6480 break 6481 } 6482 v.reset(OpARM64MOVDreg) 6483 v.AddArg(x) 
6484 return true 6485 } 6486 // match: (MOVHreg x:(MOVBUreg _)) 6487 // cond: 6488 // result: (MOVDreg x) 6489 for { 6490 x := v.Args[0] 6491 if x.Op != OpARM64MOVBUreg { 6492 break 6493 } 6494 v.reset(OpARM64MOVDreg) 6495 v.AddArg(x) 6496 return true 6497 } 6498 // match: (MOVHreg x:(MOVHreg _)) 6499 // cond: 6500 // result: (MOVDreg x) 6501 for { 6502 x := v.Args[0] 6503 if x.Op != OpARM64MOVHreg { 6504 break 6505 } 6506 v.reset(OpARM64MOVDreg) 6507 v.AddArg(x) 6508 return true 6509 } 6510 // match: (MOVHreg (MOVDconst [c])) 6511 // cond: 6512 // result: (MOVDconst [int64(int16(c))]) 6513 for { 6514 v_0 := v.Args[0] 6515 if v_0.Op != OpARM64MOVDconst { 6516 break 6517 } 6518 c := v_0.AuxInt 6519 v.reset(OpARM64MOVDconst) 6520 v.AuxInt = int64(int16(c)) 6521 return true 6522 } 6523 return false 6524 } 6525 func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool { 6526 b := v.Block 6527 _ = b 6528 config := b.Func.Config 6529 _ = config 6530 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem) 6531 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6532 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 6533 for { 6534 off1 := v.AuxInt 6535 sym := v.Aux 6536 _ = v.Args[2] 6537 v_0 := v.Args[0] 6538 if v_0.Op != OpARM64ADDconst { 6539 break 6540 } 6541 off2 := v_0.AuxInt 6542 ptr := v_0.Args[0] 6543 val := v.Args[1] 6544 mem := v.Args[2] 6545 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6546 break 6547 } 6548 v.reset(OpARM64MOVHstore) 6549 v.AuxInt = off1 + off2 6550 v.Aux = sym 6551 v.AddArg(ptr) 6552 v.AddArg(val) 6553 v.AddArg(mem) 6554 return true 6555 } 6556 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 6557 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6558 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 6559 for { 6560 off1 := v.AuxInt 6561 sym1 := v.Aux 6562 _ = v.Args[2] 6563 v_0 := 
v.Args[0] 6564 if v_0.Op != OpARM64MOVDaddr { 6565 break 6566 } 6567 off2 := v_0.AuxInt 6568 sym2 := v_0.Aux 6569 ptr := v_0.Args[0] 6570 val := v.Args[1] 6571 mem := v.Args[2] 6572 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6573 break 6574 } 6575 v.reset(OpARM64MOVHstore) 6576 v.AuxInt = off1 + off2 6577 v.Aux = mergeSym(sym1, sym2) 6578 v.AddArg(ptr) 6579 v.AddArg(val) 6580 v.AddArg(mem) 6581 return true 6582 } 6583 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 6584 // cond: 6585 // result: (MOVHstorezero [off] {sym} ptr mem) 6586 for { 6587 off := v.AuxInt 6588 sym := v.Aux 6589 _ = v.Args[2] 6590 ptr := v.Args[0] 6591 v_1 := v.Args[1] 6592 if v_1.Op != OpARM64MOVDconst { 6593 break 6594 } 6595 if v_1.AuxInt != 0 { 6596 break 6597 } 6598 mem := v.Args[2] 6599 v.reset(OpARM64MOVHstorezero) 6600 v.AuxInt = off 6601 v.Aux = sym 6602 v.AddArg(ptr) 6603 v.AddArg(mem) 6604 return true 6605 } 6606 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 6607 // cond: 6608 // result: (MOVHstore [off] {sym} ptr x mem) 6609 for { 6610 off := v.AuxInt 6611 sym := v.Aux 6612 _ = v.Args[2] 6613 ptr := v.Args[0] 6614 v_1 := v.Args[1] 6615 if v_1.Op != OpARM64MOVHreg { 6616 break 6617 } 6618 x := v_1.Args[0] 6619 mem := v.Args[2] 6620 v.reset(OpARM64MOVHstore) 6621 v.AuxInt = off 6622 v.Aux = sym 6623 v.AddArg(ptr) 6624 v.AddArg(x) 6625 v.AddArg(mem) 6626 return true 6627 } 6628 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 6629 // cond: 6630 // result: (MOVHstore [off] {sym} ptr x mem) 6631 for { 6632 off := v.AuxInt 6633 sym := v.Aux 6634 _ = v.Args[2] 6635 ptr := v.Args[0] 6636 v_1 := v.Args[1] 6637 if v_1.Op != OpARM64MOVHUreg { 6638 break 6639 } 6640 x := v_1.Args[0] 6641 mem := v.Args[2] 6642 v.reset(OpARM64MOVHstore) 6643 v.AuxInt = off 6644 v.Aux = sym 6645 v.AddArg(ptr) 6646 v.AddArg(x) 6647 v.AddArg(mem) 6648 return true 6649 } 6650 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 6651 
// cond: 6652 // result: (MOVHstore [off] {sym} ptr x mem) 6653 for { 6654 off := v.AuxInt 6655 sym := v.Aux 6656 _ = v.Args[2] 6657 ptr := v.Args[0] 6658 v_1 := v.Args[1] 6659 if v_1.Op != OpARM64MOVWreg { 6660 break 6661 } 6662 x := v_1.Args[0] 6663 mem := v.Args[2] 6664 v.reset(OpARM64MOVHstore) 6665 v.AuxInt = off 6666 v.Aux = sym 6667 v.AddArg(ptr) 6668 v.AddArg(x) 6669 v.AddArg(mem) 6670 return true 6671 } 6672 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 6673 // cond: 6674 // result: (MOVHstore [off] {sym} ptr x mem) 6675 for { 6676 off := v.AuxInt 6677 sym := v.Aux 6678 _ = v.Args[2] 6679 ptr := v.Args[0] 6680 v_1 := v.Args[1] 6681 if v_1.Op != OpARM64MOVWUreg { 6682 break 6683 } 6684 x := v_1.Args[0] 6685 mem := v.Args[2] 6686 v.reset(OpARM64MOVHstore) 6687 v.AuxInt = off 6688 v.Aux = sym 6689 v.AddArg(ptr) 6690 v.AddArg(x) 6691 v.AddArg(mem) 6692 return true 6693 } 6694 return false 6695 } 6696 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 6697 b := v.Block 6698 _ = b 6699 config := b.Func.Config 6700 _ = config 6701 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 6702 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6703 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 6704 for { 6705 off1 := v.AuxInt 6706 sym := v.Aux 6707 _ = v.Args[1] 6708 v_0 := v.Args[0] 6709 if v_0.Op != OpARM64ADDconst { 6710 break 6711 } 6712 off2 := v_0.AuxInt 6713 ptr := v_0.Args[0] 6714 mem := v.Args[1] 6715 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6716 break 6717 } 6718 v.reset(OpARM64MOVHstorezero) 6719 v.AuxInt = off1 + off2 6720 v.Aux = sym 6721 v.AddArg(ptr) 6722 v.AddArg(mem) 6723 return true 6724 } 6725 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6726 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6727 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6728 for { 
6729 off1 := v.AuxInt 6730 sym1 := v.Aux 6731 _ = v.Args[1] 6732 v_0 := v.Args[0] 6733 if v_0.Op != OpARM64MOVDaddr { 6734 break 6735 } 6736 off2 := v_0.AuxInt 6737 sym2 := v_0.Aux 6738 ptr := v_0.Args[0] 6739 mem := v.Args[1] 6740 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6741 break 6742 } 6743 v.reset(OpARM64MOVHstorezero) 6744 v.AuxInt = off1 + off2 6745 v.Aux = mergeSym(sym1, sym2) 6746 v.AddArg(ptr) 6747 v.AddArg(mem) 6748 return true 6749 } 6750 return false 6751 } 6752 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool { 6753 b := v.Block 6754 _ = b 6755 config := b.Func.Config 6756 _ = config 6757 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 6758 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6759 // result: (MOVQstorezero [off1+off2] {sym} ptr mem) 6760 for { 6761 off1 := v.AuxInt 6762 sym := v.Aux 6763 _ = v.Args[1] 6764 v_0 := v.Args[0] 6765 if v_0.Op != OpARM64ADDconst { 6766 break 6767 } 6768 off2 := v_0.AuxInt 6769 ptr := v_0.Args[0] 6770 mem := v.Args[1] 6771 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6772 break 6773 } 6774 v.reset(OpARM64MOVQstorezero) 6775 v.AuxInt = off1 + off2 6776 v.Aux = sym 6777 v.AddArg(ptr) 6778 v.AddArg(mem) 6779 return true 6780 } 6781 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6782 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6783 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6784 for { 6785 off1 := v.AuxInt 6786 sym1 := v.Aux 6787 _ = v.Args[1] 6788 v_0 := v.Args[0] 6789 if v_0.Op != OpARM64MOVDaddr { 6790 break 6791 } 6792 off2 := v_0.AuxInt 6793 sym2 := v_0.Aux 6794 ptr := v_0.Args[0] 6795 mem := v.Args[1] 6796 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6797 break 6798 } 6799 v.reset(OpARM64MOVQstorezero) 
6800 v.AuxInt = off1 + off2 6801 v.Aux = mergeSym(sym1, sym2) 6802 v.AddArg(ptr) 6803 v.AddArg(mem) 6804 return true 6805 } 6806 return false 6807 } 6808 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 6809 b := v.Block 6810 _ = b 6811 config := b.Func.Config 6812 _ = config 6813 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 6814 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6815 // result: (MOVWUload [off1+off2] {sym} ptr mem) 6816 for { 6817 off1 := v.AuxInt 6818 sym := v.Aux 6819 _ = v.Args[1] 6820 v_0 := v.Args[0] 6821 if v_0.Op != OpARM64ADDconst { 6822 break 6823 } 6824 off2 := v_0.AuxInt 6825 ptr := v_0.Args[0] 6826 mem := v.Args[1] 6827 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6828 break 6829 } 6830 v.reset(OpARM64MOVWUload) 6831 v.AuxInt = off1 + off2 6832 v.Aux = sym 6833 v.AddArg(ptr) 6834 v.AddArg(mem) 6835 return true 6836 } 6837 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 6838 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6839 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 6840 for { 6841 off1 := v.AuxInt 6842 sym1 := v.Aux 6843 _ = v.Args[1] 6844 v_0 := v.Args[0] 6845 if v_0.Op != OpARM64MOVDaddr { 6846 break 6847 } 6848 off2 := v_0.AuxInt 6849 sym2 := v_0.Aux 6850 ptr := v_0.Args[0] 6851 mem := v.Args[1] 6852 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6853 break 6854 } 6855 v.reset(OpARM64MOVWUload) 6856 v.AuxInt = off1 + off2 6857 v.Aux = mergeSym(sym1, sym2) 6858 v.AddArg(ptr) 6859 v.AddArg(mem) 6860 return true 6861 } 6862 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 6863 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 6864 // result: (MOVDconst [0]) 6865 for { 6866 off := v.AuxInt 6867 sym := v.Aux 6868 _ = v.Args[1] 6869 ptr := v.Args[0] 6870 v_1 := v.Args[1] 
6871 if v_1.Op != OpARM64MOVWstorezero { 6872 break 6873 } 6874 off2 := v_1.AuxInt 6875 sym2 := v_1.Aux 6876 _ = v_1.Args[1] 6877 ptr2 := v_1.Args[0] 6878 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 6879 break 6880 } 6881 v.reset(OpARM64MOVDconst) 6882 v.AuxInt = 0 6883 return true 6884 } 6885 return false 6886 } 6887 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 6888 // match: (MOVWUreg x:(MOVBUload _ _)) 6889 // cond: 6890 // result: (MOVDreg x) 6891 for { 6892 x := v.Args[0] 6893 if x.Op != OpARM64MOVBUload { 6894 break 6895 } 6896 _ = x.Args[1] 6897 v.reset(OpARM64MOVDreg) 6898 v.AddArg(x) 6899 return true 6900 } 6901 // match: (MOVWUreg x:(MOVHUload _ _)) 6902 // cond: 6903 // result: (MOVDreg x) 6904 for { 6905 x := v.Args[0] 6906 if x.Op != OpARM64MOVHUload { 6907 break 6908 } 6909 _ = x.Args[1] 6910 v.reset(OpARM64MOVDreg) 6911 v.AddArg(x) 6912 return true 6913 } 6914 // match: (MOVWUreg x:(MOVWUload _ _)) 6915 // cond: 6916 // result: (MOVDreg x) 6917 for { 6918 x := v.Args[0] 6919 if x.Op != OpARM64MOVWUload { 6920 break 6921 } 6922 _ = x.Args[1] 6923 v.reset(OpARM64MOVDreg) 6924 v.AddArg(x) 6925 return true 6926 } 6927 // match: (MOVWUreg x:(MOVBUreg _)) 6928 // cond: 6929 // result: (MOVDreg x) 6930 for { 6931 x := v.Args[0] 6932 if x.Op != OpARM64MOVBUreg { 6933 break 6934 } 6935 v.reset(OpARM64MOVDreg) 6936 v.AddArg(x) 6937 return true 6938 } 6939 // match: (MOVWUreg x:(MOVHUreg _)) 6940 // cond: 6941 // result: (MOVDreg x) 6942 for { 6943 x := v.Args[0] 6944 if x.Op != OpARM64MOVHUreg { 6945 break 6946 } 6947 v.reset(OpARM64MOVDreg) 6948 v.AddArg(x) 6949 return true 6950 } 6951 // match: (MOVWUreg x:(MOVWUreg _)) 6952 // cond: 6953 // result: (MOVDreg x) 6954 for { 6955 x := v.Args[0] 6956 if x.Op != OpARM64MOVWUreg { 6957 break 6958 } 6959 v.reset(OpARM64MOVDreg) 6960 v.AddArg(x) 6961 return true 6962 } 6963 // match: (MOVWUreg (MOVDconst [c])) 6964 // cond: 6965 // result: (MOVDconst [int64(uint32(c))]) 6966 for { 6967 
v_0 := v.Args[0] 6968 if v_0.Op != OpARM64MOVDconst { 6969 break 6970 } 6971 c := v_0.AuxInt 6972 v.reset(OpARM64MOVDconst) 6973 v.AuxInt = int64(uint32(c)) 6974 return true 6975 } 6976 return false 6977 } 6978 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 6979 b := v.Block 6980 _ = b 6981 config := b.Func.Config 6982 _ = config 6983 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 6984 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 6985 // result: (MOVWload [off1+off2] {sym} ptr mem) 6986 for { 6987 off1 := v.AuxInt 6988 sym := v.Aux 6989 _ = v.Args[1] 6990 v_0 := v.Args[0] 6991 if v_0.Op != OpARM64ADDconst { 6992 break 6993 } 6994 off2 := v_0.AuxInt 6995 ptr := v_0.Args[0] 6996 mem := v.Args[1] 6997 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 6998 break 6999 } 7000 v.reset(OpARM64MOVWload) 7001 v.AuxInt = off1 + off2 7002 v.Aux = sym 7003 v.AddArg(ptr) 7004 v.AddArg(mem) 7005 return true 7006 } 7007 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 7008 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 7009 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 7010 for { 7011 off1 := v.AuxInt 7012 sym1 := v.Aux 7013 _ = v.Args[1] 7014 v_0 := v.Args[0] 7015 if v_0.Op != OpARM64MOVDaddr { 7016 break 7017 } 7018 off2 := v_0.AuxInt 7019 sym2 := v_0.Aux 7020 ptr := v_0.Args[0] 7021 mem := v.Args[1] 7022 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 7023 break 7024 } 7025 v.reset(OpARM64MOVWload) 7026 v.AuxInt = off1 + off2 7027 v.Aux = mergeSym(sym1, sym2) 7028 v.AddArg(ptr) 7029 v.AddArg(mem) 7030 return true 7031 } 7032 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 7033 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 7034 // result: (MOVDconst [0]) 7035 for { 7036 off := v.AuxInt 7037 sym := v.Aux 7038 _ = 
v.Args[1] 7039 ptr := v.Args[0] 7040 v_1 := v.Args[1] 7041 if v_1.Op != OpARM64MOVWstorezero { 7042 break 7043 } 7044 off2 := v_1.AuxInt 7045 sym2 := v_1.Aux 7046 _ = v_1.Args[1] 7047 ptr2 := v_1.Args[0] 7048 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 7049 break 7050 } 7051 v.reset(OpARM64MOVDconst) 7052 v.AuxInt = 0 7053 return true 7054 } 7055 return false 7056 } 7057 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool { 7058 // match: (MOVWreg x:(MOVBload _ _)) 7059 // cond: 7060 // result: (MOVDreg x) 7061 for { 7062 x := v.Args[0] 7063 if x.Op != OpARM64MOVBload { 7064 break 7065 } 7066 _ = x.Args[1] 7067 v.reset(OpARM64MOVDreg) 7068 v.AddArg(x) 7069 return true 7070 } 7071 // match: (MOVWreg x:(MOVBUload _ _)) 7072 // cond: 7073 // result: (MOVDreg x) 7074 for { 7075 x := v.Args[0] 7076 if x.Op != OpARM64MOVBUload { 7077 break 7078 } 7079 _ = x.Args[1] 7080 v.reset(OpARM64MOVDreg) 7081 v.AddArg(x) 7082 return true 7083 } 7084 // match: (MOVWreg x:(MOVHload _ _)) 7085 // cond: 7086 // result: (MOVDreg x) 7087 for { 7088 x := v.Args[0] 7089 if x.Op != OpARM64MOVHload { 7090 break 7091 } 7092 _ = x.Args[1] 7093 v.reset(OpARM64MOVDreg) 7094 v.AddArg(x) 7095 return true 7096 } 7097 // match: (MOVWreg x:(MOVHUload _ _)) 7098 // cond: 7099 // result: (MOVDreg x) 7100 for { 7101 x := v.Args[0] 7102 if x.Op != OpARM64MOVHUload { 7103 break 7104 } 7105 _ = x.Args[1] 7106 v.reset(OpARM64MOVDreg) 7107 v.AddArg(x) 7108 return true 7109 } 7110 // match: (MOVWreg x:(MOVWload _ _)) 7111 // cond: 7112 // result: (MOVDreg x) 7113 for { 7114 x := v.Args[0] 7115 if x.Op != OpARM64MOVWload { 7116 break 7117 } 7118 _ = x.Args[1] 7119 v.reset(OpARM64MOVDreg) 7120 v.AddArg(x) 7121 return true 7122 } 7123 // match: (MOVWreg x:(MOVBreg _)) 7124 // cond: 7125 // result: (MOVDreg x) 7126 for { 7127 x := v.Args[0] 7128 if x.Op != OpARM64MOVBreg { 7129 break 7130 } 7131 v.reset(OpARM64MOVDreg) 7132 v.AddArg(x) 7133 return true 7134 } 7135 // match: (MOVWreg x:(MOVBUreg 
_)) 7136 // cond: 7137 // result: (MOVDreg x) 7138 for { 7139 x := v.Args[0] 7140 if x.Op != OpARM64MOVBUreg { 7141 break 7142 } 7143 v.reset(OpARM64MOVDreg) 7144 v.AddArg(x) 7145 return true 7146 } 7147 // match: (MOVWreg x:(MOVHreg _)) 7148 // cond: 7149 // result: (MOVDreg x) 7150 for { 7151 x := v.Args[0] 7152 if x.Op != OpARM64MOVHreg { 7153 break 7154 } 7155 v.reset(OpARM64MOVDreg) 7156 v.AddArg(x) 7157 return true 7158 } 7159 // match: (MOVWreg x:(MOVHreg _)) 7160 // cond: 7161 // result: (MOVDreg x) 7162 for { 7163 x := v.Args[0] 7164 if x.Op != OpARM64MOVHreg { 7165 break 7166 } 7167 v.reset(OpARM64MOVDreg) 7168 v.AddArg(x) 7169 return true 7170 } 7171 // match: (MOVWreg x:(MOVWreg _)) 7172 // cond: 7173 // result: (MOVDreg x) 7174 for { 7175 x := v.Args[0] 7176 if x.Op != OpARM64MOVWreg { 7177 break 7178 } 7179 v.reset(OpARM64MOVDreg) 7180 v.AddArg(x) 7181 return true 7182 } 7183 return false 7184 } 7185 func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool { 7186 // match: (MOVWreg (MOVDconst [c])) 7187 // cond: 7188 // result: (MOVDconst [int64(int32(c))]) 7189 for { 7190 v_0 := v.Args[0] 7191 if v_0.Op != OpARM64MOVDconst { 7192 break 7193 } 7194 c := v_0.AuxInt 7195 v.reset(OpARM64MOVDconst) 7196 v.AuxInt = int64(int32(c)) 7197 return true 7198 } 7199 return false 7200 } 7201 func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool { 7202 b := v.Block 7203 _ = b 7204 config := b.Func.Config 7205 _ = config 7206 // match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem) 7207 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 7208 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 7209 for { 7210 off1 := v.AuxInt 7211 sym := v.Aux 7212 _ = v.Args[2] 7213 v_0 := v.Args[0] 7214 if v_0.Op != OpARM64ADDconst { 7215 break 7216 } 7217 off2 := v_0.AuxInt 7218 ptr := v_0.Args[0] 7219 val := v.Args[1] 7220 mem := v.Args[2] 7221 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 7222 break 7223 } 7224 
v.reset(OpARM64MOVWstore) 7225 v.AuxInt = off1 + off2 7226 v.Aux = sym 7227 v.AddArg(ptr) 7228 v.AddArg(val) 7229 v.AddArg(mem) 7230 return true 7231 } 7232 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 7233 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 7234 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 7235 for { 7236 off1 := v.AuxInt 7237 sym1 := v.Aux 7238 _ = v.Args[2] 7239 v_0 := v.Args[0] 7240 if v_0.Op != OpARM64MOVDaddr { 7241 break 7242 } 7243 off2 := v_0.AuxInt 7244 sym2 := v_0.Aux 7245 ptr := v_0.Args[0] 7246 val := v.Args[1] 7247 mem := v.Args[2] 7248 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 7249 break 7250 } 7251 v.reset(OpARM64MOVWstore) 7252 v.AuxInt = off1 + off2 7253 v.Aux = mergeSym(sym1, sym2) 7254 v.AddArg(ptr) 7255 v.AddArg(val) 7256 v.AddArg(mem) 7257 return true 7258 } 7259 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 7260 // cond: 7261 // result: (MOVWstorezero [off] {sym} ptr mem) 7262 for { 7263 off := v.AuxInt 7264 sym := v.Aux 7265 _ = v.Args[2] 7266 ptr := v.Args[0] 7267 v_1 := v.Args[1] 7268 if v_1.Op != OpARM64MOVDconst { 7269 break 7270 } 7271 if v_1.AuxInt != 0 { 7272 break 7273 } 7274 mem := v.Args[2] 7275 v.reset(OpARM64MOVWstorezero) 7276 v.AuxInt = off 7277 v.Aux = sym 7278 v.AddArg(ptr) 7279 v.AddArg(mem) 7280 return true 7281 } 7282 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 7283 // cond: 7284 // result: (MOVWstore [off] {sym} ptr x mem) 7285 for { 7286 off := v.AuxInt 7287 sym := v.Aux 7288 _ = v.Args[2] 7289 ptr := v.Args[0] 7290 v_1 := v.Args[1] 7291 if v_1.Op != OpARM64MOVWreg { 7292 break 7293 } 7294 x := v_1.Args[0] 7295 mem := v.Args[2] 7296 v.reset(OpARM64MOVWstore) 7297 v.AuxInt = off 7298 v.Aux = sym 7299 v.AddArg(ptr) 7300 v.AddArg(x) 7301 v.AddArg(mem) 7302 return true 7303 } 7304 // match: (MOVWstore [off] {sym} ptr (MOVWUreg 
x) mem) 7305 // cond: 7306 // result: (MOVWstore [off] {sym} ptr x mem) 7307 for { 7308 off := v.AuxInt 7309 sym := v.Aux 7310 _ = v.Args[2] 7311 ptr := v.Args[0] 7312 v_1 := v.Args[1] 7313 if v_1.Op != OpARM64MOVWUreg { 7314 break 7315 } 7316 x := v_1.Args[0] 7317 mem := v.Args[2] 7318 v.reset(OpARM64MOVWstore) 7319 v.AuxInt = off 7320 v.Aux = sym 7321 v.AddArg(ptr) 7322 v.AddArg(x) 7323 v.AddArg(mem) 7324 return true 7325 } 7326 return false 7327 } 7328 func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool { 7329 b := v.Block 7330 _ = b 7331 config := b.Func.Config 7332 _ = config 7333 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 7334 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 7335 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 7336 for { 7337 off1 := v.AuxInt 7338 sym := v.Aux 7339 _ = v.Args[1] 7340 v_0 := v.Args[0] 7341 if v_0.Op != OpARM64ADDconst { 7342 break 7343 } 7344 off2 := v_0.AuxInt 7345 ptr := v_0.Args[0] 7346 mem := v.Args[1] 7347 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 7348 break 7349 } 7350 v.reset(OpARM64MOVWstorezero) 7351 v.AuxInt = off1 + off2 7352 v.Aux = sym 7353 v.AddArg(ptr) 7354 v.AddArg(mem) 7355 return true 7356 } 7357 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 7358 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 7359 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 7360 for { 7361 off1 := v.AuxInt 7362 sym1 := v.Aux 7363 _ = v.Args[1] 7364 v_0 := v.Args[0] 7365 if v_0.Op != OpARM64MOVDaddr { 7366 break 7367 } 7368 off2 := v_0.AuxInt 7369 sym2 := v_0.Aux 7370 ptr := v_0.Args[0] 7371 mem := v.Args[1] 7372 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 7373 break 7374 } 7375 v.reset(OpARM64MOVWstorezero) 7376 v.AuxInt = off1 + off2 7377 v.Aux = mergeSym(sym1, sym2) 7378 v.AddArg(ptr) 
// rewriteValueARM64_OpARM64MUL_0 applies the first batch of generated rewrite
// rules for 64-bit MUL: fold a NEG operand into MNEG, and strength-reduce
// multiplication by the constants -1, 0, 1, and powers of two. Each rule is
// tried in both operand orders since MUL is commutative. Mutates v in place
// and reports whether a rule fired; further MUL rules live in _10 and _20.
// NOTE(review): generated from gen/ARM64.rules; comment changes here are lost
// on regeneration.
func rewriteValueARM64_OpARM64MUL_0(v *Value) bool {
	// match: (MUL (NEG x) y)
	// cond:
	// result: (MNEG x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MNEG)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (MUL y (NEG x))
	// cond:
	// result: (MNEG x y)
	for {
		_ = v.Args[1]
		y := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MNEG)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (MUL x (MOVDconst [-1]))
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [-1]) x)
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MUL _ (MOVDconst [0]))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MUL (MOVDconst [0]) _)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MUL x (MOVDconst [1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [1]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL_10 applies the second batch of generated MUL
// rules: strength-reduce multiplication by constants of the forms 2^n+1,
// 2^n-1, 3*2^n, 5*2^n, and 7*2^n into shifted-add/negate sequences
// (ADDshiftLL, NEG, SLLconst). Each rule is tried in both operand orders.
// Mutates v in place and reports whether a rule fired.
// NOTE(review): generated from gen/ARM64.rules; comment changes here are lost
// on regeneration.
func rewriteValueARM64_OpARM64MUL_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
v.Args[1] 7743 x := v.Args[0] 7744 v_1 := v.Args[1] 7745 if v_1.Op != OpARM64MOVDconst { 7746 break 7747 } 7748 c := v_1.AuxInt 7749 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 7750 break 7751 } 7752 v.reset(OpARM64SLLconst) 7753 v.AuxInt = log2(c / 7) 7754 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7755 v0.AuxInt = 3 7756 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 7757 v1.AddArg(x) 7758 v0.AddArg(v1) 7759 v0.AddArg(x) 7760 v.AddArg(v0) 7761 return true 7762 } 7763 // match: (MUL (MOVDconst [c]) x) 7764 // cond: c%7 == 0 && isPowerOfTwo(c/7) 7765 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 7766 for { 7767 _ = v.Args[1] 7768 v_0 := v.Args[0] 7769 if v_0.Op != OpARM64MOVDconst { 7770 break 7771 } 7772 c := v_0.AuxInt 7773 x := v.Args[1] 7774 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 7775 break 7776 } 7777 v.reset(OpARM64SLLconst) 7778 v.AuxInt = log2(c / 7) 7779 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7780 v0.AuxInt = 3 7781 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 7782 v1.AddArg(x) 7783 v0.AddArg(v1) 7784 v0.AddArg(x) 7785 v.AddArg(v0) 7786 return true 7787 } 7788 return false 7789 } 7790 func rewriteValueARM64_OpARM64MUL_20(v *Value) bool { 7791 b := v.Block 7792 _ = b 7793 // match: (MUL x (MOVDconst [c])) 7794 // cond: c%9 == 0 && isPowerOfTwo(c/9) 7795 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 7796 for { 7797 _ = v.Args[1] 7798 x := v.Args[0] 7799 v_1 := v.Args[1] 7800 if v_1.Op != OpARM64MOVDconst { 7801 break 7802 } 7803 c := v_1.AuxInt 7804 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 7805 break 7806 } 7807 v.reset(OpARM64SLLconst) 7808 v.AuxInt = log2(c / 9) 7809 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7810 v0.AuxInt = 3 7811 v0.AddArg(x) 7812 v0.AddArg(x) 7813 v.AddArg(v0) 7814 return true 7815 } 7816 // match: (MUL (MOVDconst [c]) x) 7817 // cond: c%9 == 0 && isPowerOfTwo(c/9) 7818 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 7819 for { 7820 _ = v.Args[1] 7821 
// rewriteValueARM64_OpARM64MULW_0 applies the first batch of generated rewrite
// rules for 32-bit MULW: fold a NEG operand into MNEGW, and strength-reduce
// multiplication by constants whose low 32 bits are -1, 0, 1, or a power of
// two (conditions compare int32(c), since only the low 32 bits matter). Each
// rule is tried in both operand orders. Mutates v in place and reports whether
// a rule fired; further MULW rules live in _10 and _20.
// NOTE(review): generated from gen/ARM64.rules; comment changes here are lost
// on regeneration.
func rewriteValueARM64_OpARM64MULW_0(v *Value) bool {
	// match: (MULW (NEG x) y)
	// cond:
	// result: (MNEGW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MNEGW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (MULW y (NEG x))
	// cond:
	// result: (MNEGW x y)
	for {
		_ = v.Args[1]
		y := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MNEGW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MULW_10 applies the second batch of generated MULW
// rules: strength-reduce multiplication by constants of the forms 2^n+1,
// 2^n-1 (guarded on int32(c)), and 3/5/7*2^n (guarded on is32Bit(c)) into
// shifted-add/negate sequences. Each rule is tried in both operand orders.
// Mutates v in place and reports whether a rule fired.
// NOTE(review): generated from gen/ARM64.rules; comment changes here are lost
// on regeneration.
func rewriteValueARM64_OpARM64MULW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
v0.AddArg(x) 8261 v.AddArg(v0) 8262 return true 8263 } 8264 // match: (MULW (MOVDconst [c]) x) 8265 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 8266 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 8267 for { 8268 _ = v.Args[1] 8269 v_0 := v.Args[0] 8270 if v_0.Op != OpARM64MOVDconst { 8271 break 8272 } 8273 c := v_0.AuxInt 8274 x := v.Args[1] 8275 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 8276 break 8277 } 8278 v.reset(OpARM64SLLconst) 8279 v.AuxInt = log2(c / 7) 8280 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8281 v0.AuxInt = 3 8282 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8283 v1.AddArg(x) 8284 v0.AddArg(v1) 8285 v0.AddArg(x) 8286 v.AddArg(v0) 8287 return true 8288 } 8289 return false 8290 } 8291 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool { 8292 b := v.Block 8293 _ = b 8294 // match: (MULW x (MOVDconst [c])) 8295 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 8296 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 8297 for { 8298 _ = v.Args[1] 8299 x := v.Args[0] 8300 v_1 := v.Args[1] 8301 if v_1.Op != OpARM64MOVDconst { 8302 break 8303 } 8304 c := v_1.AuxInt 8305 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 8306 break 8307 } 8308 v.reset(OpARM64SLLconst) 8309 v.AuxInt = log2(c / 9) 8310 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8311 v0.AuxInt = 3 8312 v0.AddArg(x) 8313 v0.AddArg(x) 8314 v.AddArg(v0) 8315 return true 8316 } 8317 // match: (MULW (MOVDconst [c]) x) 8318 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 8319 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 8320 for { 8321 _ = v.Args[1] 8322 v_0 := v.Args[0] 8323 if v_0.Op != OpARM64MOVDconst { 8324 break 8325 } 8326 c := v_0.AuxInt 8327 x := v.Args[1] 8328 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 8329 break 8330 } 8331 v.reset(OpARM64SLLconst) 8332 v.AuxInt = log2(c / 9) 8333 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8334 v0.AuxInt = 3 8335 v0.AddArg(x) 
8336 v0.AddArg(x) 8337 v.AddArg(v0) 8338 return true 8339 } 8340 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 8341 // cond: 8342 // result: (MOVDconst [int64(int32(c)*int32(d))]) 8343 for { 8344 _ = v.Args[1] 8345 v_0 := v.Args[0] 8346 if v_0.Op != OpARM64MOVDconst { 8347 break 8348 } 8349 c := v_0.AuxInt 8350 v_1 := v.Args[1] 8351 if v_1.Op != OpARM64MOVDconst { 8352 break 8353 } 8354 d := v_1.AuxInt 8355 v.reset(OpARM64MOVDconst) 8356 v.AuxInt = int64(int32(c) * int32(d)) 8357 return true 8358 } 8359 // match: (MULW (MOVDconst [d]) (MOVDconst [c])) 8360 // cond: 8361 // result: (MOVDconst [int64(int32(c)*int32(d))]) 8362 for { 8363 _ = v.Args[1] 8364 v_0 := v.Args[0] 8365 if v_0.Op != OpARM64MOVDconst { 8366 break 8367 } 8368 d := v_0.AuxInt 8369 v_1 := v.Args[1] 8370 if v_1.Op != OpARM64MOVDconst { 8371 break 8372 } 8373 c := v_1.AuxInt 8374 v.reset(OpARM64MOVDconst) 8375 v.AuxInt = int64(int32(c) * int32(d)) 8376 return true 8377 } 8378 return false 8379 } 8380 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool { 8381 // match: (MVN (MOVDconst [c])) 8382 // cond: 8383 // result: (MOVDconst [^c]) 8384 for { 8385 v_0 := v.Args[0] 8386 if v_0.Op != OpARM64MOVDconst { 8387 break 8388 } 8389 c := v_0.AuxInt 8390 v.reset(OpARM64MOVDconst) 8391 v.AuxInt = ^c 8392 return true 8393 } 8394 return false 8395 } 8396 func rewriteValueARM64_OpARM64NEG_0(v *Value) bool { 8397 // match: (NEG (MUL x y)) 8398 // cond: 8399 // result: (MNEG x y) 8400 for { 8401 v_0 := v.Args[0] 8402 if v_0.Op != OpARM64MUL { 8403 break 8404 } 8405 _ = v_0.Args[1] 8406 x := v_0.Args[0] 8407 y := v_0.Args[1] 8408 v.reset(OpARM64MNEG) 8409 v.AddArg(x) 8410 v.AddArg(y) 8411 return true 8412 } 8413 // match: (NEG (MULW x y)) 8414 // cond: 8415 // result: (MNEGW x y) 8416 for { 8417 v_0 := v.Args[0] 8418 if v_0.Op != OpARM64MULW { 8419 break 8420 } 8421 _ = v_0.Args[1] 8422 x := v_0.Args[0] 8423 y := v_0.Args[1] 8424 v.reset(OpARM64MNEGW) 8425 v.AddArg(x) 8426 v.AddArg(y) 8427 return true 8428 } 
8429 // match: (NEG (MOVDconst [c])) 8430 // cond: 8431 // result: (MOVDconst [-c]) 8432 for { 8433 v_0 := v.Args[0] 8434 if v_0.Op != OpARM64MOVDconst { 8435 break 8436 } 8437 c := v_0.AuxInt 8438 v.reset(OpARM64MOVDconst) 8439 v.AuxInt = -c 8440 return true 8441 } 8442 return false 8443 } 8444 func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool { 8445 // match: (NotEqual (FlagEQ)) 8446 // cond: 8447 // result: (MOVDconst [0]) 8448 for { 8449 v_0 := v.Args[0] 8450 if v_0.Op != OpARM64FlagEQ { 8451 break 8452 } 8453 v.reset(OpARM64MOVDconst) 8454 v.AuxInt = 0 8455 return true 8456 } 8457 // match: (NotEqual (FlagLT_ULT)) 8458 // cond: 8459 // result: (MOVDconst [1]) 8460 for { 8461 v_0 := v.Args[0] 8462 if v_0.Op != OpARM64FlagLT_ULT { 8463 break 8464 } 8465 v.reset(OpARM64MOVDconst) 8466 v.AuxInt = 1 8467 return true 8468 } 8469 // match: (NotEqual (FlagLT_UGT)) 8470 // cond: 8471 // result: (MOVDconst [1]) 8472 for { 8473 v_0 := v.Args[0] 8474 if v_0.Op != OpARM64FlagLT_UGT { 8475 break 8476 } 8477 v.reset(OpARM64MOVDconst) 8478 v.AuxInt = 1 8479 return true 8480 } 8481 // match: (NotEqual (FlagGT_ULT)) 8482 // cond: 8483 // result: (MOVDconst [1]) 8484 for { 8485 v_0 := v.Args[0] 8486 if v_0.Op != OpARM64FlagGT_ULT { 8487 break 8488 } 8489 v.reset(OpARM64MOVDconst) 8490 v.AuxInt = 1 8491 return true 8492 } 8493 // match: (NotEqual (FlagGT_UGT)) 8494 // cond: 8495 // result: (MOVDconst [1]) 8496 for { 8497 v_0 := v.Args[0] 8498 if v_0.Op != OpARM64FlagGT_UGT { 8499 break 8500 } 8501 v.reset(OpARM64MOVDconst) 8502 v.AuxInt = 1 8503 return true 8504 } 8505 // match: (NotEqual (InvertFlags x)) 8506 // cond: 8507 // result: (NotEqual x) 8508 for { 8509 v_0 := v.Args[0] 8510 if v_0.Op != OpARM64InvertFlags { 8511 break 8512 } 8513 x := v_0.Args[0] 8514 v.reset(OpARM64NotEqual) 8515 v.AddArg(x) 8516 return true 8517 } 8518 return false 8519 } 8520 func rewriteValueARM64_OpARM64OR_0(v *Value) bool { 8521 b := v.Block 8522 _ = b 8523 // match: (OR x (MOVDconst [c])) 
8524 // cond: 8525 // result: (ORconst [c] x) 8526 for { 8527 _ = v.Args[1] 8528 x := v.Args[0] 8529 v_1 := v.Args[1] 8530 if v_1.Op != OpARM64MOVDconst { 8531 break 8532 } 8533 c := v_1.AuxInt 8534 v.reset(OpARM64ORconst) 8535 v.AuxInt = c 8536 v.AddArg(x) 8537 return true 8538 } 8539 // match: (OR (MOVDconst [c]) x) 8540 // cond: 8541 // result: (ORconst [c] x) 8542 for { 8543 _ = v.Args[1] 8544 v_0 := v.Args[0] 8545 if v_0.Op != OpARM64MOVDconst { 8546 break 8547 } 8548 c := v_0.AuxInt 8549 x := v.Args[1] 8550 v.reset(OpARM64ORconst) 8551 v.AuxInt = c 8552 v.AddArg(x) 8553 return true 8554 } 8555 // match: (OR x x) 8556 // cond: 8557 // result: x 8558 for { 8559 _ = v.Args[1] 8560 x := v.Args[0] 8561 if x != v.Args[1] { 8562 break 8563 } 8564 v.reset(OpCopy) 8565 v.Type = x.Type 8566 v.AddArg(x) 8567 return true 8568 } 8569 // match: (OR x0 x1:(SLLconst [c] y)) 8570 // cond: clobberIfDead(x1) 8571 // result: (ORshiftLL x0 y [c]) 8572 for { 8573 _ = v.Args[1] 8574 x0 := v.Args[0] 8575 x1 := v.Args[1] 8576 if x1.Op != OpARM64SLLconst { 8577 break 8578 } 8579 c := x1.AuxInt 8580 y := x1.Args[0] 8581 if !(clobberIfDead(x1)) { 8582 break 8583 } 8584 v.reset(OpARM64ORshiftLL) 8585 v.AuxInt = c 8586 v.AddArg(x0) 8587 v.AddArg(y) 8588 return true 8589 } 8590 // match: (OR x1:(SLLconst [c] y) x0) 8591 // cond: clobberIfDead(x1) 8592 // result: (ORshiftLL x0 y [c]) 8593 for { 8594 _ = v.Args[1] 8595 x1 := v.Args[0] 8596 if x1.Op != OpARM64SLLconst { 8597 break 8598 } 8599 c := x1.AuxInt 8600 y := x1.Args[0] 8601 x0 := v.Args[1] 8602 if !(clobberIfDead(x1)) { 8603 break 8604 } 8605 v.reset(OpARM64ORshiftLL) 8606 v.AuxInt = c 8607 v.AddArg(x0) 8608 v.AddArg(y) 8609 return true 8610 } 8611 // match: (OR x0 x1:(SRLconst [c] y)) 8612 // cond: clobberIfDead(x1) 8613 // result: (ORshiftRL x0 y [c]) 8614 for { 8615 _ = v.Args[1] 8616 x0 := v.Args[0] 8617 x1 := v.Args[1] 8618 if x1.Op != OpARM64SRLconst { 8619 break 8620 } 8621 c := x1.AuxInt 8622 y := x1.Args[0] 8623 if 
!(clobberIfDead(x1)) { 8624 break 8625 } 8626 v.reset(OpARM64ORshiftRL) 8627 v.AuxInt = c 8628 v.AddArg(x0) 8629 v.AddArg(y) 8630 return true 8631 } 8632 // match: (OR x1:(SRLconst [c] y) x0) 8633 // cond: clobberIfDead(x1) 8634 // result: (ORshiftRL x0 y [c]) 8635 for { 8636 _ = v.Args[1] 8637 x1 := v.Args[0] 8638 if x1.Op != OpARM64SRLconst { 8639 break 8640 } 8641 c := x1.AuxInt 8642 y := x1.Args[0] 8643 x0 := v.Args[1] 8644 if !(clobberIfDead(x1)) { 8645 break 8646 } 8647 v.reset(OpARM64ORshiftRL) 8648 v.AuxInt = c 8649 v.AddArg(x0) 8650 v.AddArg(y) 8651 return true 8652 } 8653 // match: (OR x0 x1:(SRAconst [c] y)) 8654 // cond: clobberIfDead(x1) 8655 // result: (ORshiftRA x0 y [c]) 8656 for { 8657 _ = v.Args[1] 8658 x0 := v.Args[0] 8659 x1 := v.Args[1] 8660 if x1.Op != OpARM64SRAconst { 8661 break 8662 } 8663 c := x1.AuxInt 8664 y := x1.Args[0] 8665 if !(clobberIfDead(x1)) { 8666 break 8667 } 8668 v.reset(OpARM64ORshiftRA) 8669 v.AuxInt = c 8670 v.AddArg(x0) 8671 v.AddArg(y) 8672 return true 8673 } 8674 // match: (OR x1:(SRAconst [c] y) x0) 8675 // cond: clobberIfDead(x1) 8676 // result: (ORshiftRA x0 y [c]) 8677 for { 8678 _ = v.Args[1] 8679 x1 := v.Args[0] 8680 if x1.Op != OpARM64SRAconst { 8681 break 8682 } 8683 c := x1.AuxInt 8684 y := x1.Args[0] 8685 x0 := v.Args[1] 8686 if !(clobberIfDead(x1)) { 8687 break 8688 } 8689 v.reset(OpARM64ORshiftRA) 8690 v.AuxInt = c 8691 v.AddArg(x0) 8692 v.AddArg(y) 8693 return true 8694 } 8695 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 8696 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && 
clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 8697 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8698 for { 8699 t := v.Type 8700 _ = v.Args[1] 8701 o0 := v.Args[0] 8702 if o0.Op != OpARM64ORshiftLL { 8703 break 8704 } 8705 if o0.AuxInt != 8 { 8706 break 8707 } 8708 _ = o0.Args[1] 8709 o1 := o0.Args[0] 8710 if o1.Op != OpARM64ORshiftLL { 8711 break 8712 } 8713 if o1.AuxInt != 16 { 8714 break 8715 } 8716 _ = o1.Args[1] 8717 s0 := o1.Args[0] 8718 if s0.Op != OpARM64SLLconst { 8719 break 8720 } 8721 if s0.AuxInt != 24 { 8722 break 8723 } 8724 y0 := s0.Args[0] 8725 if y0.Op != OpARM64MOVDnop { 8726 break 8727 } 8728 x0 := y0.Args[0] 8729 if x0.Op != OpARM64MOVBUload { 8730 break 8731 } 8732 i3 := x0.AuxInt 8733 s := x0.Aux 8734 _ = x0.Args[1] 8735 p := x0.Args[0] 8736 mem := x0.Args[1] 8737 y1 := o1.Args[1] 8738 if y1.Op != OpARM64MOVDnop { 8739 break 8740 } 8741 x1 := y1.Args[0] 8742 if x1.Op != OpARM64MOVBUload { 8743 break 8744 } 8745 i2 := x1.AuxInt 8746 if x1.Aux != s { 8747 break 8748 } 8749 _ = x1.Args[1] 8750 if p != x1.Args[0] { 8751 break 8752 } 8753 if mem != x1.Args[1] { 8754 break 8755 } 8756 y2 := o0.Args[1] 8757 if y2.Op != OpARM64MOVDnop { 8758 break 8759 } 8760 x2 := y2.Args[0] 8761 if x2.Op != OpARM64MOVBUload { 8762 break 8763 } 8764 i1 := x2.AuxInt 8765 if x2.Aux != s { 8766 break 8767 } 8768 _ = x2.Args[1] 8769 if p != x2.Args[0] { 8770 break 8771 } 8772 if mem != x2.Args[1] { 8773 break 8774 } 8775 y3 := v.Args[1] 8776 if y3.Op != OpARM64MOVDnop { 8777 break 8778 } 8779 x3 := y3.Args[0] 8780 if x3.Op != OpARM64MOVBUload { 8781 break 8782 } 8783 i0 := x3.AuxInt 8784 if x3.Aux != s { 8785 break 8786 } 8787 _ = x3.Args[1] 8788 if p != x3.Args[0] { 8789 break 8790 } 8791 if mem != x3.Args[1] { 8792 break 8793 } 8794 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && 
x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 8795 break 8796 } 8797 b = mergePoint(b, x0, x1, x2, x3) 8798 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8799 v.reset(OpCopy) 8800 v.AddArg(v0) 8801 v0.Aux = s 8802 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8803 v1.AuxInt = i0 8804 v1.AddArg(p) 8805 v0.AddArg(v1) 8806 v0.AddArg(mem) 8807 return true 8808 } 8809 return false 8810 } 8811 func rewriteValueARM64_OpARM64OR_10(v *Value) bool { 8812 b := v.Block 8813 _ = b 8814 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 8815 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 8816 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8817 for { 8818 t := v.Type 8819 _ = v.Args[1] 8820 y3 := v.Args[0] 8821 if y3.Op != OpARM64MOVDnop { 8822 break 8823 } 8824 x3 := y3.Args[0] 8825 if x3.Op != OpARM64MOVBUload { 8826 break 8827 } 8828 i0 := x3.AuxInt 8829 s := x3.Aux 8830 _ = x3.Args[1] 8831 p := x3.Args[0] 8832 mem := x3.Args[1] 8833 o0 := v.Args[1] 8834 if o0.Op != OpARM64ORshiftLL { 8835 break 8836 } 8837 if o0.AuxInt != 8 { 8838 break 8839 } 8840 _ = o0.Args[1] 8841 o1 := o0.Args[0] 8842 if 
o1.Op != OpARM64ORshiftLL { 8843 break 8844 } 8845 if o1.AuxInt != 16 { 8846 break 8847 } 8848 _ = o1.Args[1] 8849 s0 := o1.Args[0] 8850 if s0.Op != OpARM64SLLconst { 8851 break 8852 } 8853 if s0.AuxInt != 24 { 8854 break 8855 } 8856 y0 := s0.Args[0] 8857 if y0.Op != OpARM64MOVDnop { 8858 break 8859 } 8860 x0 := y0.Args[0] 8861 if x0.Op != OpARM64MOVBUload { 8862 break 8863 } 8864 i3 := x0.AuxInt 8865 if x0.Aux != s { 8866 break 8867 } 8868 _ = x0.Args[1] 8869 if p != x0.Args[0] { 8870 break 8871 } 8872 if mem != x0.Args[1] { 8873 break 8874 } 8875 y1 := o1.Args[1] 8876 if y1.Op != OpARM64MOVDnop { 8877 break 8878 } 8879 x1 := y1.Args[0] 8880 if x1.Op != OpARM64MOVBUload { 8881 break 8882 } 8883 i2 := x1.AuxInt 8884 if x1.Aux != s { 8885 break 8886 } 8887 _ = x1.Args[1] 8888 if p != x1.Args[0] { 8889 break 8890 } 8891 if mem != x1.Args[1] { 8892 break 8893 } 8894 y2 := o0.Args[1] 8895 if y2.Op != OpARM64MOVDnop { 8896 break 8897 } 8898 x2 := y2.Args[0] 8899 if x2.Op != OpARM64MOVBUload { 8900 break 8901 } 8902 i1 := x2.AuxInt 8903 if x2.Aux != s { 8904 break 8905 } 8906 _ = x2.Args[1] 8907 if p != x2.Args[0] { 8908 break 8909 } 8910 if mem != x2.Args[1] { 8911 break 8912 } 8913 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 8914 break 8915 } 8916 b = mergePoint(b, x0, x1, x2, x3) 8917 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8918 v.reset(OpCopy) 8919 v.AddArg(v0) 8920 v0.Aux = s 8921 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8922 v1.AuxInt = i0 8923 v1.AddArg(p) 8924 v0.AddArg(v1) 8925 v0.AddArg(mem) 8926 return true 8927 } 8928 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] 
o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 8929 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 8930 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 8931 for { 8932 t := v.Type 8933 _ = v.Args[1] 8934 o0 := v.Args[0] 8935 if o0.Op != OpARM64ORshiftLL { 8936 break 8937 } 8938 if o0.AuxInt != 8 { 8939 break 8940 } 8941 _ = o0.Args[1] 8942 o1 := o0.Args[0] 8943 if o1.Op != OpARM64ORshiftLL { 8944 break 8945 } 8946 if o1.AuxInt != 16 { 8947 break 8948 } 8949 _ = o1.Args[1] 8950 o2 := o1.Args[0] 8951 if o2.Op != OpARM64ORshiftLL { 8952 break 8953 } 8954 if o2.AuxInt != 24 { 8955 break 8956 } 8957 _ = o2.Args[1] 8958 o3 := o2.Args[0] 8959 if o3.Op != OpARM64ORshiftLL { 8960 break 8961 } 8962 if o3.AuxInt != 32 { 
8963 break 8964 } 8965 _ = o3.Args[1] 8966 o4 := o3.Args[0] 8967 if o4.Op != OpARM64ORshiftLL { 8968 break 8969 } 8970 if o4.AuxInt != 40 { 8971 break 8972 } 8973 _ = o4.Args[1] 8974 o5 := o4.Args[0] 8975 if o5.Op != OpARM64ORshiftLL { 8976 break 8977 } 8978 if o5.AuxInt != 48 { 8979 break 8980 } 8981 _ = o5.Args[1] 8982 s0 := o5.Args[0] 8983 if s0.Op != OpARM64SLLconst { 8984 break 8985 } 8986 if s0.AuxInt != 56 { 8987 break 8988 } 8989 y0 := s0.Args[0] 8990 if y0.Op != OpARM64MOVDnop { 8991 break 8992 } 8993 x0 := y0.Args[0] 8994 if x0.Op != OpARM64MOVBUload { 8995 break 8996 } 8997 i7 := x0.AuxInt 8998 s := x0.Aux 8999 _ = x0.Args[1] 9000 p := x0.Args[0] 9001 mem := x0.Args[1] 9002 y1 := o5.Args[1] 9003 if y1.Op != OpARM64MOVDnop { 9004 break 9005 } 9006 x1 := y1.Args[0] 9007 if x1.Op != OpARM64MOVBUload { 9008 break 9009 } 9010 i6 := x1.AuxInt 9011 if x1.Aux != s { 9012 break 9013 } 9014 _ = x1.Args[1] 9015 if p != x1.Args[0] { 9016 break 9017 } 9018 if mem != x1.Args[1] { 9019 break 9020 } 9021 y2 := o4.Args[1] 9022 if y2.Op != OpARM64MOVDnop { 9023 break 9024 } 9025 x2 := y2.Args[0] 9026 if x2.Op != OpARM64MOVBUload { 9027 break 9028 } 9029 i5 := x2.AuxInt 9030 if x2.Aux != s { 9031 break 9032 } 9033 _ = x2.Args[1] 9034 if p != x2.Args[0] { 9035 break 9036 } 9037 if mem != x2.Args[1] { 9038 break 9039 } 9040 y3 := o3.Args[1] 9041 if y3.Op != OpARM64MOVDnop { 9042 break 9043 } 9044 x3 := y3.Args[0] 9045 if x3.Op != OpARM64MOVBUload { 9046 break 9047 } 9048 i4 := x3.AuxInt 9049 if x3.Aux != s { 9050 break 9051 } 9052 _ = x3.Args[1] 9053 if p != x3.Args[0] { 9054 break 9055 } 9056 if mem != x3.Args[1] { 9057 break 9058 } 9059 y4 := o2.Args[1] 9060 if y4.Op != OpARM64MOVDnop { 9061 break 9062 } 9063 x4 := y4.Args[0] 9064 if x4.Op != OpARM64MOVBUload { 9065 break 9066 } 9067 i3 := x4.AuxInt 9068 if x4.Aux != s { 9069 break 9070 } 9071 _ = x4.Args[1] 9072 if p != x4.Args[0] { 9073 break 9074 } 9075 if mem != x4.Args[1] { 9076 break 9077 } 9078 y5 := o1.Args[1] 9079 
if y5.Op != OpARM64MOVDnop { 9080 break 9081 } 9082 x5 := y5.Args[0] 9083 if x5.Op != OpARM64MOVBUload { 9084 break 9085 } 9086 i2 := x5.AuxInt 9087 if x5.Aux != s { 9088 break 9089 } 9090 _ = x5.Args[1] 9091 if p != x5.Args[0] { 9092 break 9093 } 9094 if mem != x5.Args[1] { 9095 break 9096 } 9097 y6 := o0.Args[1] 9098 if y6.Op != OpARM64MOVDnop { 9099 break 9100 } 9101 x6 := y6.Args[0] 9102 if x6.Op != OpARM64MOVBUload { 9103 break 9104 } 9105 i1 := x6.AuxInt 9106 if x6.Aux != s { 9107 break 9108 } 9109 _ = x6.Args[1] 9110 if p != x6.Args[0] { 9111 break 9112 } 9113 if mem != x6.Args[1] { 9114 break 9115 } 9116 y7 := v.Args[1] 9117 if y7.Op != OpARM64MOVDnop { 9118 break 9119 } 9120 x7 := y7.Args[0] 9121 if x7.Op != OpARM64MOVBUload { 9122 break 9123 } 9124 i0 := x7.AuxInt 9125 if x7.Aux != s { 9126 break 9127 } 9128 _ = x7.Args[1] 9129 if p != x7.Args[0] { 9130 break 9131 } 9132 if mem != x7.Args[1] { 9133 break 9134 } 9135 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 9136 break 9137 } 9138 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 9139 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 9140 v.reset(OpCopy) 9141 v.AddArg(v0) 9142 v1 := b.NewValue0(v.Pos, 
OpARM64MOVDload, t) 9143 v1.Aux = s 9144 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 9145 v2.AuxInt = i0 9146 v2.AddArg(p) 9147 v1.AddArg(v2) 9148 v1.AddArg(mem) 9149 v0.AddArg(v1) 9150 return true 9151 } 9152 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 9153 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 9154 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 9155 for { 9156 t := v.Type 9157 _ = v.Args[1] 9158 y7 := v.Args[0] 9159 if y7.Op != OpARM64MOVDnop { 9160 break 9161 } 9162 x7 := y7.Args[0] 9163 if x7.Op != OpARM64MOVBUload { 9164 break 9165 } 9166 i0 := x7.AuxInt 9167 s := x7.Aux 9168 _ = x7.Args[1] 9169 p := x7.Args[0] 9170 mem := x7.Args[1] 9171 o0 
:= v.Args[1] 9172 if o0.Op != OpARM64ORshiftLL { 9173 break 9174 } 9175 if o0.AuxInt != 8 { 9176 break 9177 } 9178 _ = o0.Args[1] 9179 o1 := o0.Args[0] 9180 if o1.Op != OpARM64ORshiftLL { 9181 break 9182 } 9183 if o1.AuxInt != 16 { 9184 break 9185 } 9186 _ = o1.Args[1] 9187 o2 := o1.Args[0] 9188 if o2.Op != OpARM64ORshiftLL { 9189 break 9190 } 9191 if o2.AuxInt != 24 { 9192 break 9193 } 9194 _ = o2.Args[1] 9195 o3 := o2.Args[0] 9196 if o3.Op != OpARM64ORshiftLL { 9197 break 9198 } 9199 if o3.AuxInt != 32 { 9200 break 9201 } 9202 _ = o3.Args[1] 9203 o4 := o3.Args[0] 9204 if o4.Op != OpARM64ORshiftLL { 9205 break 9206 } 9207 if o4.AuxInt != 40 { 9208 break 9209 } 9210 _ = o4.Args[1] 9211 o5 := o4.Args[0] 9212 if o5.Op != OpARM64ORshiftLL { 9213 break 9214 } 9215 if o5.AuxInt != 48 { 9216 break 9217 } 9218 _ = o5.Args[1] 9219 s0 := o5.Args[0] 9220 if s0.Op != OpARM64SLLconst { 9221 break 9222 } 9223 if s0.AuxInt != 56 { 9224 break 9225 } 9226 y0 := s0.Args[0] 9227 if y0.Op != OpARM64MOVDnop { 9228 break 9229 } 9230 x0 := y0.Args[0] 9231 if x0.Op != OpARM64MOVBUload { 9232 break 9233 } 9234 i7 := x0.AuxInt 9235 if x0.Aux != s { 9236 break 9237 } 9238 _ = x0.Args[1] 9239 if p != x0.Args[0] { 9240 break 9241 } 9242 if mem != x0.Args[1] { 9243 break 9244 } 9245 y1 := o5.Args[1] 9246 if y1.Op != OpARM64MOVDnop { 9247 break 9248 } 9249 x1 := y1.Args[0] 9250 if x1.Op != OpARM64MOVBUload { 9251 break 9252 } 9253 i6 := x1.AuxInt 9254 if x1.Aux != s { 9255 break 9256 } 9257 _ = x1.Args[1] 9258 if p != x1.Args[0] { 9259 break 9260 } 9261 if mem != x1.Args[1] { 9262 break 9263 } 9264 y2 := o4.Args[1] 9265 if y2.Op != OpARM64MOVDnop { 9266 break 9267 } 9268 x2 := y2.Args[0] 9269 if x2.Op != OpARM64MOVBUload { 9270 break 9271 } 9272 i5 := x2.AuxInt 9273 if x2.Aux != s { 9274 break 9275 } 9276 _ = x2.Args[1] 9277 if p != x2.Args[0] { 9278 break 9279 } 9280 if mem != x2.Args[1] { 9281 break 9282 } 9283 y3 := o3.Args[1] 9284 if y3.Op != OpARM64MOVDnop { 9285 break 9286 } 9287 x3 := 
y3.Args[0] 9288 if x3.Op != OpARM64MOVBUload { 9289 break 9290 } 9291 i4 := x3.AuxInt 9292 if x3.Aux != s { 9293 break 9294 } 9295 _ = x3.Args[1] 9296 if p != x3.Args[0] { 9297 break 9298 } 9299 if mem != x3.Args[1] { 9300 break 9301 } 9302 y4 := o2.Args[1] 9303 if y4.Op != OpARM64MOVDnop { 9304 break 9305 } 9306 x4 := y4.Args[0] 9307 if x4.Op != OpARM64MOVBUload { 9308 break 9309 } 9310 i3 := x4.AuxInt 9311 if x4.Aux != s { 9312 break 9313 } 9314 _ = x4.Args[1] 9315 if p != x4.Args[0] { 9316 break 9317 } 9318 if mem != x4.Args[1] { 9319 break 9320 } 9321 y5 := o1.Args[1] 9322 if y5.Op != OpARM64MOVDnop { 9323 break 9324 } 9325 x5 := y5.Args[0] 9326 if x5.Op != OpARM64MOVBUload { 9327 break 9328 } 9329 i2 := x5.AuxInt 9330 if x5.Aux != s { 9331 break 9332 } 9333 _ = x5.Args[1] 9334 if p != x5.Args[0] { 9335 break 9336 } 9337 if mem != x5.Args[1] { 9338 break 9339 } 9340 y6 := o0.Args[1] 9341 if y6.Op != OpARM64MOVDnop { 9342 break 9343 } 9344 x6 := y6.Args[0] 9345 if x6.Op != OpARM64MOVBUload { 9346 break 9347 } 9348 i1 := x6.AuxInt 9349 if x6.Aux != s { 9350 break 9351 } 9352 _ = x6.Args[1] 9353 if p != x6.Args[0] { 9354 break 9355 } 9356 if mem != x6.Args[1] { 9357 break 9358 } 9359 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) 
&& clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 9360 break 9361 } 9362 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 9363 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 9364 v.reset(OpCopy) 9365 v.AddArg(v0) 9366 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 9367 v1.Aux = s 9368 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 9369 v2.AuxInt = i0 9370 v2.AddArg(p) 9371 v1.AddArg(v2) 9372 v1.AddArg(mem) 9373 v0.AddArg(v1) 9374 return true 9375 } 9376 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 9377 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 9378 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 9379 for { 9380 t := v.Type 9381 _ = v.Args[1] 9382 o0 := v.Args[0] 9383 if o0.Op != OpARM64ORshiftLL { 9384 break 9385 } 9386 if o0.AuxInt != 8 { 9387 break 9388 } 9389 _ = o0.Args[1] 9390 o1 := o0.Args[0] 9391 if o1.Op != OpARM64ORshiftLL { 9392 break 9393 } 9394 if o1.AuxInt != 16 { 9395 break 9396 } 9397 _ = o1.Args[1] 9398 s0 := o1.Args[0] 9399 if s0.Op != OpARM64SLLconst { 9400 break 9401 } 9402 if s0.AuxInt != 24 { 9403 break 9404 } 9405 y0 := s0.Args[0] 9406 if y0.Op != OpARM64MOVDnop { 9407 break 9408 } 9409 x0 := y0.Args[0] 9410 if x0.Op != OpARM64MOVBUload { 9411 break 9412 } 9413 i0 := x0.AuxInt 9414 s := x0.Aux 9415 _ = x0.Args[1] 9416 p := x0.Args[0] 9417 mem := x0.Args[1] 9418 y1 := o1.Args[1] 9419 if y1.Op != 
OpARM64MOVDnop { 9420 break 9421 } 9422 x1 := y1.Args[0] 9423 if x1.Op != OpARM64MOVBUload { 9424 break 9425 } 9426 i1 := x1.AuxInt 9427 if x1.Aux != s { 9428 break 9429 } 9430 _ = x1.Args[1] 9431 if p != x1.Args[0] { 9432 break 9433 } 9434 if mem != x1.Args[1] { 9435 break 9436 } 9437 y2 := o0.Args[1] 9438 if y2.Op != OpARM64MOVDnop { 9439 break 9440 } 9441 x2 := y2.Args[0] 9442 if x2.Op != OpARM64MOVBUload { 9443 break 9444 } 9445 i2 := x2.AuxInt 9446 if x2.Aux != s { 9447 break 9448 } 9449 _ = x2.Args[1] 9450 if p != x2.Args[0] { 9451 break 9452 } 9453 if mem != x2.Args[1] { 9454 break 9455 } 9456 y3 := v.Args[1] 9457 if y3.Op != OpARM64MOVDnop { 9458 break 9459 } 9460 x3 := y3.Args[0] 9461 if x3.Op != OpARM64MOVBUload { 9462 break 9463 } 9464 i3 := x3.AuxInt 9465 if x3.Aux != s { 9466 break 9467 } 9468 _ = x3.Args[1] 9469 if p != x3.Args[0] { 9470 break 9471 } 9472 if mem != x3.Args[1] { 9473 break 9474 } 9475 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 9476 break 9477 } 9478 b = mergePoint(b, x0, x1, x2, x3) 9479 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 9480 v.reset(OpCopy) 9481 v.AddArg(v0) 9482 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 9483 v1.Aux = s 9484 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 9485 v2.AuxInt = i0 9486 v2.AddArg(p) 9487 v1.AddArg(v2) 9488 v1.AddArg(mem) 9489 v0.AddArg(v1) 9490 return true 9491 } 9492 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 9493 // 
cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 9494 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 9495 for { 9496 t := v.Type 9497 _ = v.Args[1] 9498 y3 := v.Args[0] 9499 if y3.Op != OpARM64MOVDnop { 9500 break 9501 } 9502 x3 := y3.Args[0] 9503 if x3.Op != OpARM64MOVBUload { 9504 break 9505 } 9506 i3 := x3.AuxInt 9507 s := x3.Aux 9508 _ = x3.Args[1] 9509 p := x3.Args[0] 9510 mem := x3.Args[1] 9511 o0 := v.Args[1] 9512 if o0.Op != OpARM64ORshiftLL { 9513 break 9514 } 9515 if o0.AuxInt != 8 { 9516 break 9517 } 9518 _ = o0.Args[1] 9519 o1 := o0.Args[0] 9520 if o1.Op != OpARM64ORshiftLL { 9521 break 9522 } 9523 if o1.AuxInt != 16 { 9524 break 9525 } 9526 _ = o1.Args[1] 9527 s0 := o1.Args[0] 9528 if s0.Op != OpARM64SLLconst { 9529 break 9530 } 9531 if s0.AuxInt != 24 { 9532 break 9533 } 9534 y0 := s0.Args[0] 9535 if y0.Op != OpARM64MOVDnop { 9536 break 9537 } 9538 x0 := y0.Args[0] 9539 if x0.Op != OpARM64MOVBUload { 9540 break 9541 } 9542 i0 := x0.AuxInt 9543 if x0.Aux != s { 9544 break 9545 } 9546 _ = x0.Args[1] 9547 if p != x0.Args[0] { 9548 break 9549 } 9550 if mem != x0.Args[1] { 9551 break 9552 } 9553 y1 := o1.Args[1] 9554 if y1.Op != OpARM64MOVDnop { 9555 break 9556 } 9557 x1 := y1.Args[0] 9558 if x1.Op != OpARM64MOVBUload { 9559 break 9560 } 9561 i1 := x1.AuxInt 9562 if x1.Aux != s { 9563 break 9564 } 9565 _ = x1.Args[1] 9566 if p != x1.Args[0] { 9567 break 9568 } 9569 if mem != x1.Args[1] { 9570 break 9571 } 9572 y2 := o0.Args[1] 9573 if y2.Op != OpARM64MOVDnop { 9574 break 9575 } 9576 x2 := y2.Args[0] 9577 if x2.Op != OpARM64MOVBUload { 9578 
break 9579 } 9580 i2 := x2.AuxInt 9581 if x2.Aux != s { 9582 break 9583 } 9584 _ = x2.Args[1] 9585 if p != x2.Args[0] { 9586 break 9587 } 9588 if mem != x2.Args[1] { 9589 break 9590 } 9591 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 9592 break 9593 } 9594 b = mergePoint(b, x0, x1, x2, x3) 9595 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 9596 v.reset(OpCopy) 9597 v.AddArg(v0) 9598 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 9599 v1.Aux = s 9600 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 9601 v2.AuxInt = i0 9602 v2.AddArg(p) 9603 v1.AddArg(v2) 9604 v1.AddArg(mem) 9605 v0.AddArg(v1) 9606 return true 9607 } 9608 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 9609 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && 
mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 9610 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 9611 for { 9612 t := v.Type 9613 _ = v.Args[1] 9614 o0 := v.Args[0] 9615 if o0.Op != OpARM64ORshiftLL { 9616 break 9617 } 9618 if o0.AuxInt != 8 { 9619 break 9620 } 9621 _ = o0.Args[1] 9622 o1 := o0.Args[0] 9623 if o1.Op != OpARM64ORshiftLL { 9624 break 9625 } 9626 if o1.AuxInt != 16 { 9627 break 9628 } 9629 _ = o1.Args[1] 9630 o2 := o1.Args[0] 9631 if o2.Op != OpARM64ORshiftLL { 9632 break 9633 } 9634 if o2.AuxInt != 24 { 9635 break 9636 } 9637 _ = o2.Args[1] 9638 o3 := o2.Args[0] 9639 if o3.Op != OpARM64ORshiftLL { 9640 break 9641 } 9642 if o3.AuxInt != 32 { 9643 break 9644 } 9645 _ = o3.Args[1] 9646 o4 := o3.Args[0] 9647 if o4.Op != OpARM64ORshiftLL { 9648 break 9649 } 9650 if o4.AuxInt != 40 { 9651 break 9652 } 9653 _ = o4.Args[1] 9654 o5 := o4.Args[0] 9655 if o5.Op != OpARM64ORshiftLL { 9656 break 9657 } 9658 if o5.AuxInt != 48 { 9659 break 9660 } 9661 _ = o5.Args[1] 9662 s0 := o5.Args[0] 9663 if s0.Op != OpARM64SLLconst { 9664 break 9665 } 9666 if s0.AuxInt != 56 { 9667 break 9668 } 9669 y0 := s0.Args[0] 9670 if y0.Op != OpARM64MOVDnop { 9671 break 9672 } 9673 x0 := y0.Args[0] 9674 if x0.Op != OpARM64MOVBUload { 9675 break 9676 } 9677 i0 := x0.AuxInt 9678 s := x0.Aux 9679 _ = x0.Args[1] 9680 p := x0.Args[0] 9681 mem := x0.Args[1] 9682 y1 := o5.Args[1] 9683 if y1.Op != OpARM64MOVDnop { 9684 break 9685 } 9686 x1 := y1.Args[0] 9687 if x1.Op != OpARM64MOVBUload { 9688 break 9689 } 9690 i1 := x1.AuxInt 9691 if x1.Aux != s { 9692 break 9693 } 9694 _ = x1.Args[1] 9695 if p 
!= x1.Args[0] { 9696 break 9697 } 9698 if mem != x1.Args[1] { 9699 break 9700 } 9701 y2 := o4.Args[1] 9702 if y2.Op != OpARM64MOVDnop { 9703 break 9704 } 9705 x2 := y2.Args[0] 9706 if x2.Op != OpARM64MOVBUload { 9707 break 9708 } 9709 i2 := x2.AuxInt 9710 if x2.Aux != s { 9711 break 9712 } 9713 _ = x2.Args[1] 9714 if p != x2.Args[0] { 9715 break 9716 } 9717 if mem != x2.Args[1] { 9718 break 9719 } 9720 y3 := o3.Args[1] 9721 if y3.Op != OpARM64MOVDnop { 9722 break 9723 } 9724 x3 := y3.Args[0] 9725 if x3.Op != OpARM64MOVBUload { 9726 break 9727 } 9728 i3 := x3.AuxInt 9729 if x3.Aux != s { 9730 break 9731 } 9732 _ = x3.Args[1] 9733 if p != x3.Args[0] { 9734 break 9735 } 9736 if mem != x3.Args[1] { 9737 break 9738 } 9739 y4 := o2.Args[1] 9740 if y4.Op != OpARM64MOVDnop { 9741 break 9742 } 9743 x4 := y4.Args[0] 9744 if x4.Op != OpARM64MOVBUload { 9745 break 9746 } 9747 i4 := x4.AuxInt 9748 if x4.Aux != s { 9749 break 9750 } 9751 _ = x4.Args[1] 9752 if p != x4.Args[0] { 9753 break 9754 } 9755 if mem != x4.Args[1] { 9756 break 9757 } 9758 y5 := o1.Args[1] 9759 if y5.Op != OpARM64MOVDnop { 9760 break 9761 } 9762 x5 := y5.Args[0] 9763 if x5.Op != OpARM64MOVBUload { 9764 break 9765 } 9766 i5 := x5.AuxInt 9767 if x5.Aux != s { 9768 break 9769 } 9770 _ = x5.Args[1] 9771 if p != x5.Args[0] { 9772 break 9773 } 9774 if mem != x5.Args[1] { 9775 break 9776 } 9777 y6 := o0.Args[1] 9778 if y6.Op != OpARM64MOVDnop { 9779 break 9780 } 9781 x6 := y6.Args[0] 9782 if x6.Op != OpARM64MOVBUload { 9783 break 9784 } 9785 i6 := x6.AuxInt 9786 if x6.Aux != s { 9787 break 9788 } 9789 _ = x6.Args[1] 9790 if p != x6.Args[0] { 9791 break 9792 } 9793 if mem != x6.Args[1] { 9794 break 9795 } 9796 y7 := v.Args[1] 9797 if y7.Op != OpARM64MOVDnop { 9798 break 9799 } 9800 x7 := y7.Args[0] 9801 if x7.Op != OpARM64MOVBUload { 9802 break 9803 } 9804 i7 := x7.AuxInt 9805 if x7.Aux != s { 9806 break 9807 } 9808 _ = x7.Args[1] 9809 if p != x7.Args[0] { 9810 break 9811 } 9812 if mem != x7.Args[1] { 9813 break 
9814 } 9815 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 9816 break 9817 } 9818 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 9819 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 9820 v.reset(OpCopy) 9821 v.AddArg(v0) 9822 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 9823 v1.Aux = s 9824 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 9825 v2.AuxInt = i0 9826 v2.AddArg(p) 9827 v1.AddArg(v2) 9828 v1.AddArg(mem) 9829 v0.AddArg(v1) 9830 return true 9831 } 9832 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 9833 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 
1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 9834 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 9835 for { 9836 t := v.Type 9837 _ = v.Args[1] 9838 y7 := v.Args[0] 9839 if y7.Op != OpARM64MOVDnop { 9840 break 9841 } 9842 x7 := y7.Args[0] 9843 if x7.Op != OpARM64MOVBUload { 9844 break 9845 } 9846 i7 := x7.AuxInt 9847 s := x7.Aux 9848 _ = x7.Args[1] 9849 p := x7.Args[0] 9850 mem := x7.Args[1] 9851 o0 := v.Args[1] 9852 if o0.Op != OpARM64ORshiftLL { 9853 break 9854 } 9855 if o0.AuxInt != 8 { 9856 break 9857 } 9858 _ = o0.Args[1] 9859 o1 := o0.Args[0] 9860 if o1.Op != OpARM64ORshiftLL { 9861 break 9862 } 9863 if o1.AuxInt != 16 { 9864 break 9865 } 9866 _ = o1.Args[1] 9867 o2 := o1.Args[0] 9868 if o2.Op != OpARM64ORshiftLL { 9869 break 9870 } 9871 if o2.AuxInt != 24 { 9872 break 9873 } 9874 _ = o2.Args[1] 9875 o3 := o2.Args[0] 9876 if o3.Op != OpARM64ORshiftLL { 9877 break 9878 } 9879 if o3.AuxInt != 32 { 9880 break 9881 } 9882 _ = o3.Args[1] 9883 o4 := o3.Args[0] 9884 if o4.Op != OpARM64ORshiftLL { 9885 break 9886 } 9887 if o4.AuxInt != 40 { 9888 break 9889 } 9890 _ = o4.Args[1] 9891 o5 := o4.Args[0] 9892 if o5.Op != OpARM64ORshiftLL { 9893 break 9894 } 9895 if o5.AuxInt != 48 { 9896 break 9897 } 9898 _ = o5.Args[1] 9899 s0 := o5.Args[0] 9900 if s0.Op != OpARM64SLLconst { 9901 break 9902 } 9903 if s0.AuxInt != 56 
{ 9904 break 9905 } 9906 y0 := s0.Args[0] 9907 if y0.Op != OpARM64MOVDnop { 9908 break 9909 } 9910 x0 := y0.Args[0] 9911 if x0.Op != OpARM64MOVBUload { 9912 break 9913 } 9914 i0 := x0.AuxInt 9915 if x0.Aux != s { 9916 break 9917 } 9918 _ = x0.Args[1] 9919 if p != x0.Args[0] { 9920 break 9921 } 9922 if mem != x0.Args[1] { 9923 break 9924 } 9925 y1 := o5.Args[1] 9926 if y1.Op != OpARM64MOVDnop { 9927 break 9928 } 9929 x1 := y1.Args[0] 9930 if x1.Op != OpARM64MOVBUload { 9931 break 9932 } 9933 i1 := x1.AuxInt 9934 if x1.Aux != s { 9935 break 9936 } 9937 _ = x1.Args[1] 9938 if p != x1.Args[0] { 9939 break 9940 } 9941 if mem != x1.Args[1] { 9942 break 9943 } 9944 y2 := o4.Args[1] 9945 if y2.Op != OpARM64MOVDnop { 9946 break 9947 } 9948 x2 := y2.Args[0] 9949 if x2.Op != OpARM64MOVBUload { 9950 break 9951 } 9952 i2 := x2.AuxInt 9953 if x2.Aux != s { 9954 break 9955 } 9956 _ = x2.Args[1] 9957 if p != x2.Args[0] { 9958 break 9959 } 9960 if mem != x2.Args[1] { 9961 break 9962 } 9963 y3 := o3.Args[1] 9964 if y3.Op != OpARM64MOVDnop { 9965 break 9966 } 9967 x3 := y3.Args[0] 9968 if x3.Op != OpARM64MOVBUload { 9969 break 9970 } 9971 i3 := x3.AuxInt 9972 if x3.Aux != s { 9973 break 9974 } 9975 _ = x3.Args[1] 9976 if p != x3.Args[0] { 9977 break 9978 } 9979 if mem != x3.Args[1] { 9980 break 9981 } 9982 y4 := o2.Args[1] 9983 if y4.Op != OpARM64MOVDnop { 9984 break 9985 } 9986 x4 := y4.Args[0] 9987 if x4.Op != OpARM64MOVBUload { 9988 break 9989 } 9990 i4 := x4.AuxInt 9991 if x4.Aux != s { 9992 break 9993 } 9994 _ = x4.Args[1] 9995 if p != x4.Args[0] { 9996 break 9997 } 9998 if mem != x4.Args[1] { 9999 break 10000 } 10001 y5 := o1.Args[1] 10002 if y5.Op != OpARM64MOVDnop { 10003 break 10004 } 10005 x5 := y5.Args[0] 10006 if x5.Op != OpARM64MOVBUload { 10007 break 10008 } 10009 i5 := x5.AuxInt 10010 if x5.Aux != s { 10011 break 10012 } 10013 _ = x5.Args[1] 10014 if p != x5.Args[0] { 10015 break 10016 } 10017 if mem != x5.Args[1] { 10018 break 10019 } 10020 y6 := o0.Args[1] 10021 if 
y6.Op != OpARM64MOVDnop { 10022 break 10023 } 10024 x6 := y6.Args[0] 10025 if x6.Op != OpARM64MOVBUload { 10026 break 10027 } 10028 i6 := x6.AuxInt 10029 if x6.Aux != s { 10030 break 10031 } 10032 _ = x6.Args[1] 10033 if p != x6.Args[0] { 10034 break 10035 } 10036 if mem != x6.Args[1] { 10037 break 10038 } 10039 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 10040 break 10041 } 10042 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 10043 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 10044 v.reset(OpCopy) 10045 v.AddArg(v0) 10046 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 10047 v1.Aux = s 10048 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10049 v2.AuxInt = i0 10050 v2.AddArg(p) 10051 v1.AddArg(v2) 10052 v1.AddArg(mem) 10053 v0.AddArg(v1) 10054 return true 10055 } 10056 return false 10057 } 10058 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool { 10059 // match: (ORconst [0] x) 10060 // cond: 10061 // result: x 10062 for { 10063 if v.AuxInt != 0 { 10064 break 10065 } 10066 x := v.Args[0] 10067 v.reset(OpCopy) 10068 v.Type = x.Type 10069 v.AddArg(x) 10070 return true 10071 } 10072 // match: (ORconst [-1] _) 10073 // cond: 10074 // result: (MOVDconst [-1]) 
10075 for { 10076 if v.AuxInt != -1 { 10077 break 10078 } 10079 v.reset(OpARM64MOVDconst) 10080 v.AuxInt = -1 10081 return true 10082 } 10083 // match: (ORconst [c] (MOVDconst [d])) 10084 // cond: 10085 // result: (MOVDconst [c|d]) 10086 for { 10087 c := v.AuxInt 10088 v_0 := v.Args[0] 10089 if v_0.Op != OpARM64MOVDconst { 10090 break 10091 } 10092 d := v_0.AuxInt 10093 v.reset(OpARM64MOVDconst) 10094 v.AuxInt = c | d 10095 return true 10096 } 10097 // match: (ORconst [c] (ORconst [d] x)) 10098 // cond: 10099 // result: (ORconst [c|d] x) 10100 for { 10101 c := v.AuxInt 10102 v_0 := v.Args[0] 10103 if v_0.Op != OpARM64ORconst { 10104 break 10105 } 10106 d := v_0.AuxInt 10107 x := v_0.Args[0] 10108 v.reset(OpARM64ORconst) 10109 v.AuxInt = c | d 10110 v.AddArg(x) 10111 return true 10112 } 10113 return false 10114 } 10115 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool { 10116 b := v.Block 10117 _ = b 10118 // match: (ORshiftLL (MOVDconst [c]) x [d]) 10119 // cond: 10120 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 10121 for { 10122 d := v.AuxInt 10123 _ = v.Args[1] 10124 v_0 := v.Args[0] 10125 if v_0.Op != OpARM64MOVDconst { 10126 break 10127 } 10128 c := v_0.AuxInt 10129 x := v.Args[1] 10130 v.reset(OpARM64ORconst) 10131 v.AuxInt = c 10132 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 10133 v0.AuxInt = d 10134 v0.AddArg(x) 10135 v.AddArg(v0) 10136 return true 10137 } 10138 // match: (ORshiftLL x (MOVDconst [c]) [d]) 10139 // cond: 10140 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 10141 for { 10142 d := v.AuxInt 10143 _ = v.Args[1] 10144 x := v.Args[0] 10145 v_1 := v.Args[1] 10146 if v_1.Op != OpARM64MOVDconst { 10147 break 10148 } 10149 c := v_1.AuxInt 10150 v.reset(OpARM64ORconst) 10151 v.AuxInt = int64(uint64(c) << uint64(d)) 10152 v.AddArg(x) 10153 return true 10154 } 10155 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 10156 // cond: c==d 10157 // result: y 10158 for { 10159 d := v.AuxInt 10160 _ = v.Args[1] 10161 x := v.Args[0] 10162 
y := v.Args[1] 10163 if y.Op != OpARM64SLLconst { 10164 break 10165 } 10166 c := y.AuxInt 10167 if x != y.Args[0] { 10168 break 10169 } 10170 if !(c == d) { 10171 break 10172 } 10173 v.reset(OpCopy) 10174 v.Type = y.Type 10175 v.AddArg(y) 10176 return true 10177 } 10178 // match: (ORshiftLL [c] (SRLconst x [64-c]) x) 10179 // cond: 10180 // result: (RORconst [64-c] x) 10181 for { 10182 c := v.AuxInt 10183 _ = v.Args[1] 10184 v_0 := v.Args[0] 10185 if v_0.Op != OpARM64SRLconst { 10186 break 10187 } 10188 if v_0.AuxInt != 64-c { 10189 break 10190 } 10191 x := v_0.Args[0] 10192 if x != v.Args[1] { 10193 break 10194 } 10195 v.reset(OpARM64RORconst) 10196 v.AuxInt = 64 - c 10197 v.AddArg(x) 10198 return true 10199 } 10200 // match: (ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 10201 // cond: c < 32 && t.Size() == 4 10202 // result: (RORWconst [32-c] x) 10203 for { 10204 t := v.Type 10205 c := v.AuxInt 10206 _ = v.Args[1] 10207 v_0 := v.Args[0] 10208 if v_0.Op != OpARM64SRLconst { 10209 break 10210 } 10211 if v_0.AuxInt != 32-c { 10212 break 10213 } 10214 v_0_0 := v_0.Args[0] 10215 if v_0_0.Op != OpARM64MOVWUreg { 10216 break 10217 } 10218 x := v_0_0.Args[0] 10219 if x != v.Args[1] { 10220 break 10221 } 10222 if !(c < 32 && t.Size() == 4) { 10223 break 10224 } 10225 v.reset(OpARM64RORWconst) 10226 v.AuxInt = 32 - c 10227 v.AddArg(x) 10228 return true 10229 } 10230 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) 10231 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 10232 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 10233 for { 10234 t := v.Type 10235 if v.AuxInt != 8 { 10236 break 10237 } 10238 _ = v.Args[1] 10239 y0 := v.Args[0] 10240 if y0.Op != OpARM64MOVDnop { 10241 break 10242 } 10243 x0 := y0.Args[0] 10244 if x0.Op != 
OpARM64MOVBUload { 10245 break 10246 } 10247 i0 := x0.AuxInt 10248 s := x0.Aux 10249 _ = x0.Args[1] 10250 p := x0.Args[0] 10251 mem := x0.Args[1] 10252 y1 := v.Args[1] 10253 if y1.Op != OpARM64MOVDnop { 10254 break 10255 } 10256 x1 := y1.Args[0] 10257 if x1.Op != OpARM64MOVBUload { 10258 break 10259 } 10260 i1 := x1.AuxInt 10261 if x1.Aux != s { 10262 break 10263 } 10264 _ = x1.Args[1] 10265 if p != x1.Args[0] { 10266 break 10267 } 10268 if mem != x1.Args[1] { 10269 break 10270 } 10271 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 10272 break 10273 } 10274 b = mergePoint(b, x0, x1) 10275 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 10276 v.reset(OpCopy) 10277 v.AddArg(v0) 10278 v0.Aux = s 10279 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10280 v1.AuxInt = i0 10281 v1.AddArg(p) 10282 v0.AddArg(v1) 10283 v0.AddArg(mem) 10284 return true 10285 } 10286 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 10287 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 10288 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 10289 for { 10290 t := v.Type 10291 if v.AuxInt != 24 { 10292 break 10293 } 10294 _ = v.Args[1] 10295 o0 := v.Args[0] 10296 if o0.Op != OpARM64ORshiftLL { 10297 break 10298 } 10299 if o0.AuxInt != 16 { 10300 break 10301 } 10302 _ = o0.Args[1] 10303 x0 := o0.Args[0] 10304 if x0.Op != OpARM64MOVHUload { 10305 break 10306 } 10307 i0 := x0.AuxInt 10308 s := x0.Aux 10309 _ = x0.Args[1] 10310 p := x0.Args[0] 10311 mem := x0.Args[1] 10312 y1 := o0.Args[1] 10313 if y1.Op != OpARM64MOVDnop { 10314 
break 10315 } 10316 x1 := y1.Args[0] 10317 if x1.Op != OpARM64MOVBUload { 10318 break 10319 } 10320 i2 := x1.AuxInt 10321 if x1.Aux != s { 10322 break 10323 } 10324 _ = x1.Args[1] 10325 if p != x1.Args[0] { 10326 break 10327 } 10328 if mem != x1.Args[1] { 10329 break 10330 } 10331 y2 := v.Args[1] 10332 if y2.Op != OpARM64MOVDnop { 10333 break 10334 } 10335 x2 := y2.Args[0] 10336 if x2.Op != OpARM64MOVBUload { 10337 break 10338 } 10339 i3 := x2.AuxInt 10340 if x2.Aux != s { 10341 break 10342 } 10343 _ = x2.Args[1] 10344 if p != x2.Args[0] { 10345 break 10346 } 10347 if mem != x2.Args[1] { 10348 break 10349 } 10350 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 10351 break 10352 } 10353 b = mergePoint(b, x0, x1, x2) 10354 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 10355 v.reset(OpCopy) 10356 v.AddArg(v0) 10357 v0.Aux = s 10358 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10359 v1.AuxInt = i0 10360 v1.AddArg(p) 10361 v0.AddArg(v1) 10362 v0.AddArg(mem) 10363 return true 10364 } 10365 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 10366 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 10367 // result: 
@mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 10368 for { 10369 t := v.Type 10370 if v.AuxInt != 56 { 10371 break 10372 } 10373 _ = v.Args[1] 10374 o0 := v.Args[0] 10375 if o0.Op != OpARM64ORshiftLL { 10376 break 10377 } 10378 if o0.AuxInt != 48 { 10379 break 10380 } 10381 _ = o0.Args[1] 10382 o1 := o0.Args[0] 10383 if o1.Op != OpARM64ORshiftLL { 10384 break 10385 } 10386 if o1.AuxInt != 40 { 10387 break 10388 } 10389 _ = o1.Args[1] 10390 o2 := o1.Args[0] 10391 if o2.Op != OpARM64ORshiftLL { 10392 break 10393 } 10394 if o2.AuxInt != 32 { 10395 break 10396 } 10397 _ = o2.Args[1] 10398 x0 := o2.Args[0] 10399 if x0.Op != OpARM64MOVWUload { 10400 break 10401 } 10402 i0 := x0.AuxInt 10403 s := x0.Aux 10404 _ = x0.Args[1] 10405 p := x0.Args[0] 10406 mem := x0.Args[1] 10407 y1 := o2.Args[1] 10408 if y1.Op != OpARM64MOVDnop { 10409 break 10410 } 10411 x1 := y1.Args[0] 10412 if x1.Op != OpARM64MOVBUload { 10413 break 10414 } 10415 i4 := x1.AuxInt 10416 if x1.Aux != s { 10417 break 10418 } 10419 _ = x1.Args[1] 10420 if p != x1.Args[0] { 10421 break 10422 } 10423 if mem != x1.Args[1] { 10424 break 10425 } 10426 y2 := o1.Args[1] 10427 if y2.Op != OpARM64MOVDnop { 10428 break 10429 } 10430 x2 := y2.Args[0] 10431 if x2.Op != OpARM64MOVBUload { 10432 break 10433 } 10434 i5 := x2.AuxInt 10435 if x2.Aux != s { 10436 break 10437 } 10438 _ = x2.Args[1] 10439 if p != x2.Args[0] { 10440 break 10441 } 10442 if mem != x2.Args[1] { 10443 break 10444 } 10445 y3 := o0.Args[1] 10446 if y3.Op != OpARM64MOVDnop { 10447 break 10448 } 10449 x3 := y3.Args[0] 10450 if x3.Op != OpARM64MOVBUload { 10451 break 10452 } 10453 i6 := x3.AuxInt 10454 if x3.Aux != s { 10455 break 10456 } 10457 _ = x3.Args[1] 10458 if p != x3.Args[0] { 10459 break 10460 } 10461 if mem != x3.Args[1] { 10462 break 10463 } 10464 y4 := v.Args[1] 10465 if y4.Op != OpARM64MOVDnop { 10466 break 10467 } 10468 x4 := y4.Args[0] 10469 if x4.Op != OpARM64MOVBUload { 10470 break 10471 } 10472 i7 := 
x4.AuxInt 10473 if x4.Aux != s { 10474 break 10475 } 10476 _ = x4.Args[1] 10477 if p != x4.Args[0] { 10478 break 10479 } 10480 if mem != x4.Args[1] { 10481 break 10482 } 10483 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 10484 break 10485 } 10486 b = mergePoint(b, x0, x1, x2, x3, x4) 10487 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 10488 v.reset(OpCopy) 10489 v.AddArg(v0) 10490 v0.Aux = s 10491 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10492 v1.AuxInt = i0 10493 v1.AddArg(p) 10494 v0.AddArg(v1) 10495 v0.AddArg(mem) 10496 return true 10497 } 10498 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 10499 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 10500 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 10501 for { 10502 t := v.Type 10503 if v.AuxInt != 8 { 10504 break 10505 } 10506 _ = v.Args[1] 10507 y0 := v.Args[0] 10508 if y0.Op != OpARM64MOVDnop { 10509 break 10510 } 10511 x0 := y0.Args[0] 10512 if x0.Op != OpARM64MOVBUload { 10513 break 10514 } 10515 i1 := x0.AuxInt 10516 s := x0.Aux 10517 _ = x0.Args[1] 10518 p := x0.Args[0] 10519 mem := x0.Args[1] 10520 y1 := v.Args[1] 10521 if y1.Op != OpARM64MOVDnop { 10522 break 10523 } 10524 x1 := y1.Args[0] 10525 if x1.Op != OpARM64MOVBUload { 10526 break 10527 } 10528 i0 := x1.AuxInt 10529 if x1.Aux != s { 10530 break 10531 } 10532 _ = x1.Args[1] 10533 if p != x1.Args[0] { 
10534 break 10535 } 10536 if mem != x1.Args[1] { 10537 break 10538 } 10539 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 10540 break 10541 } 10542 b = mergePoint(b, x0, x1) 10543 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 10544 v.reset(OpCopy) 10545 v.AddArg(v0) 10546 v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 10547 v1.AuxInt = i0 10548 v1.Aux = s 10549 v1.AddArg(p) 10550 v1.AddArg(mem) 10551 v0.AddArg(v1) 10552 return true 10553 } 10554 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 10555 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 10556 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 10557 for { 10558 t := v.Type 10559 if v.AuxInt != 24 { 10560 break 10561 } 10562 _ = v.Args[1] 10563 o0 := v.Args[0] 10564 if o0.Op != OpARM64ORshiftLL { 10565 break 10566 } 10567 if o0.AuxInt != 16 { 10568 break 10569 } 10570 _ = o0.Args[1] 10571 y0 := o0.Args[0] 10572 if y0.Op != OpARM64REV16W { 10573 break 10574 } 10575 x0 := y0.Args[0] 10576 if x0.Op != OpARM64MOVHUload { 10577 break 10578 } 10579 i2 := x0.AuxInt 10580 s := x0.Aux 10581 _ = x0.Args[1] 10582 p := x0.Args[0] 10583 mem := x0.Args[1] 10584 y1 := o0.Args[1] 10585 if y1.Op != OpARM64MOVDnop { 10586 break 10587 } 10588 x1 := y1.Args[0] 10589 if x1.Op != OpARM64MOVBUload { 10590 break 10591 } 10592 i1 := x1.AuxInt 10593 if x1.Aux != s { 10594 break 10595 } 10596 _ = x1.Args[1] 10597 if p != x1.Args[0] { 10598 break 10599 } 10600 if mem != x1.Args[1] { 10601 break 10602 } 10603 y2 
:= v.Args[1] 10604 if y2.Op != OpARM64MOVDnop { 10605 break 10606 } 10607 x2 := y2.Args[0] 10608 if x2.Op != OpARM64MOVBUload { 10609 break 10610 } 10611 i0 := x2.AuxInt 10612 if x2.Aux != s { 10613 break 10614 } 10615 _ = x2.Args[1] 10616 if p != x2.Args[0] { 10617 break 10618 } 10619 if mem != x2.Args[1] { 10620 break 10621 } 10622 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 10623 break 10624 } 10625 b = mergePoint(b, x0, x1, x2) 10626 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 10627 v.reset(OpCopy) 10628 v.AddArg(v0) 10629 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 10630 v1.Aux = s 10631 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10632 v2.AuxInt = i0 10633 v2.AddArg(p) 10634 v1.AddArg(v2) 10635 v1.AddArg(mem) 10636 v0.AddArg(v1) 10637 return true 10638 } 10639 return false 10640 } 10641 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool { 10642 b := v.Block 10643 _ = b 10644 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 10645 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 10646 // result: 
@mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 10647 for { 10648 t := v.Type 10649 if v.AuxInt != 56 { 10650 break 10651 } 10652 _ = v.Args[1] 10653 o0 := v.Args[0] 10654 if o0.Op != OpARM64ORshiftLL { 10655 break 10656 } 10657 if o0.AuxInt != 48 { 10658 break 10659 } 10660 _ = o0.Args[1] 10661 o1 := o0.Args[0] 10662 if o1.Op != OpARM64ORshiftLL { 10663 break 10664 } 10665 if o1.AuxInt != 40 { 10666 break 10667 } 10668 _ = o1.Args[1] 10669 o2 := o1.Args[0] 10670 if o2.Op != OpARM64ORshiftLL { 10671 break 10672 } 10673 if o2.AuxInt != 32 { 10674 break 10675 } 10676 _ = o2.Args[1] 10677 y0 := o2.Args[0] 10678 if y0.Op != OpARM64REVW { 10679 break 10680 } 10681 x0 := y0.Args[0] 10682 if x0.Op != OpARM64MOVWUload { 10683 break 10684 } 10685 i4 := x0.AuxInt 10686 s := x0.Aux 10687 _ = x0.Args[1] 10688 p := x0.Args[0] 10689 mem := x0.Args[1] 10690 y1 := o2.Args[1] 10691 if y1.Op != OpARM64MOVDnop { 10692 break 10693 } 10694 x1 := y1.Args[0] 10695 if x1.Op != OpARM64MOVBUload { 10696 break 10697 } 10698 i3 := x1.AuxInt 10699 if x1.Aux != s { 10700 break 10701 } 10702 _ = x1.Args[1] 10703 if p != x1.Args[0] { 10704 break 10705 } 10706 if mem != x1.Args[1] { 10707 break 10708 } 10709 y2 := o1.Args[1] 10710 if y2.Op != OpARM64MOVDnop { 10711 break 10712 } 10713 x2 := y2.Args[0] 10714 if x2.Op != OpARM64MOVBUload { 10715 break 10716 } 10717 i2 := x2.AuxInt 10718 if x2.Aux != s { 10719 break 10720 } 10721 _ = x2.Args[1] 10722 if p != x2.Args[0] { 10723 break 10724 } 10725 if mem != x2.Args[1] { 10726 break 10727 } 10728 y3 := o0.Args[1] 10729 if y3.Op != OpARM64MOVDnop { 10730 break 10731 } 10732 x3 := y3.Args[0] 10733 if x3.Op != OpARM64MOVBUload { 10734 break 10735 } 10736 i1 := x3.AuxInt 10737 if x3.Aux != s { 10738 break 10739 } 10740 _ = x3.Args[1] 10741 if p != x3.Args[0] { 10742 break 10743 } 10744 if mem != x3.Args[1] { 10745 break 10746 } 10747 y4 := v.Args[1] 10748 if y4.Op != OpARM64MOVDnop { 10749 break 10750 } 10751 x4 := 
y4.Args[0] 10752 if x4.Op != OpARM64MOVBUload { 10753 break 10754 } 10755 i0 := x4.AuxInt 10756 if x4.Aux != s { 10757 break 10758 } 10759 _ = x4.Args[1] 10760 if p != x4.Args[0] { 10761 break 10762 } 10763 if mem != x4.Args[1] { 10764 break 10765 } 10766 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 10767 break 10768 } 10769 b = mergePoint(b, x0, x1, x2, x3, x4) 10770 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 10771 v.reset(OpCopy) 10772 v.AddArg(v0) 10773 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 10774 v1.Aux = s 10775 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 10776 v2.AuxInt = i0 10777 v2.AddArg(p) 10778 v1.AddArg(v2) 10779 v1.AddArg(mem) 10780 v0.AddArg(v1) 10781 return true 10782 } 10783 return false 10784 } 10785 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 10786 b := v.Block 10787 _ = b 10788 // match: (ORshiftRA (MOVDconst [c]) x [d]) 10789 // cond: 10790 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 10791 for { 10792 d := v.AuxInt 10793 _ = v.Args[1] 10794 v_0 := v.Args[0] 10795 if v_0.Op != OpARM64MOVDconst { 10796 break 10797 } 10798 c := v_0.AuxInt 10799 x := v.Args[1] 10800 v.reset(OpARM64ORconst) 10801 v.AuxInt = c 10802 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 10803 v0.AuxInt = d 10804 v0.AddArg(x) 10805 v.AddArg(v0) 10806 return true 10807 } 10808 // match: (ORshiftRA x (MOVDconst [c]) [d]) 10809 // cond: 10810 // result: (ORconst x [int64(int64(c)>>uint64(d))]) 10811 for { 10812 d := v.AuxInt 10813 _ = v.Args[1] 10814 x := v.Args[0] 10815 v_1 := v.Args[1] 10816 if 
v_1.Op != OpARM64MOVDconst { 10817 break 10818 } 10819 c := v_1.AuxInt 10820 v.reset(OpARM64ORconst) 10821 v.AuxInt = int64(int64(c) >> uint64(d)) 10822 v.AddArg(x) 10823 return true 10824 } 10825 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 10826 // cond: c==d 10827 // result: y 10828 for { 10829 d := v.AuxInt 10830 _ = v.Args[1] 10831 x := v.Args[0] 10832 y := v.Args[1] 10833 if y.Op != OpARM64SRAconst { 10834 break 10835 } 10836 c := y.AuxInt 10837 if x != y.Args[0] { 10838 break 10839 } 10840 if !(c == d) { 10841 break 10842 } 10843 v.reset(OpCopy) 10844 v.Type = y.Type 10845 v.AddArg(y) 10846 return true 10847 } 10848 return false 10849 } 10850 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 10851 b := v.Block 10852 _ = b 10853 // match: (ORshiftRL (MOVDconst [c]) x [d]) 10854 // cond: 10855 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 10856 for { 10857 d := v.AuxInt 10858 _ = v.Args[1] 10859 v_0 := v.Args[0] 10860 if v_0.Op != OpARM64MOVDconst { 10861 break 10862 } 10863 c := v_0.AuxInt 10864 x := v.Args[1] 10865 v.reset(OpARM64ORconst) 10866 v.AuxInt = c 10867 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 10868 v0.AuxInt = d 10869 v0.AddArg(x) 10870 v.AddArg(v0) 10871 return true 10872 } 10873 // match: (ORshiftRL x (MOVDconst [c]) [d]) 10874 // cond: 10875 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 10876 for { 10877 d := v.AuxInt 10878 _ = v.Args[1] 10879 x := v.Args[0] 10880 v_1 := v.Args[1] 10881 if v_1.Op != OpARM64MOVDconst { 10882 break 10883 } 10884 c := v_1.AuxInt 10885 v.reset(OpARM64ORconst) 10886 v.AuxInt = int64(uint64(c) >> uint64(d)) 10887 v.AddArg(x) 10888 return true 10889 } 10890 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 10891 // cond: c==d 10892 // result: y 10893 for { 10894 d := v.AuxInt 10895 _ = v.Args[1] 10896 x := v.Args[0] 10897 y := v.Args[1] 10898 if y.Op != OpARM64SRLconst { 10899 break 10900 } 10901 c := y.AuxInt 10902 if x != y.Args[0] { 10903 break 10904 } 10905 if !(c == d) { 10906 break 
10907 } 10908 v.reset(OpCopy) 10909 v.Type = y.Type 10910 v.AddArg(y) 10911 return true 10912 } 10913 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 10914 // cond: 10915 // result: (RORconst [ c] x) 10916 for { 10917 c := v.AuxInt 10918 _ = v.Args[1] 10919 v_0 := v.Args[0] 10920 if v_0.Op != OpARM64SLLconst { 10921 break 10922 } 10923 if v_0.AuxInt != 64-c { 10924 break 10925 } 10926 x := v_0.Args[0] 10927 if x != v.Args[1] { 10928 break 10929 } 10930 v.reset(OpARM64RORconst) 10931 v.AuxInt = c 10932 v.AddArg(x) 10933 return true 10934 } 10935 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 10936 // cond: c < 32 && t.Size() == 4 10937 // result: (RORWconst [ c] x) 10938 for { 10939 t := v.Type 10940 c := v.AuxInt 10941 _ = v.Args[1] 10942 v_0 := v.Args[0] 10943 if v_0.Op != OpARM64SLLconst { 10944 break 10945 } 10946 if v_0.AuxInt != 32-c { 10947 break 10948 } 10949 x := v_0.Args[0] 10950 v_1 := v.Args[1] 10951 if v_1.Op != OpARM64MOVWUreg { 10952 break 10953 } 10954 if x != v_1.Args[0] { 10955 break 10956 } 10957 if !(c < 32 && t.Size() == 4) { 10958 break 10959 } 10960 v.reset(OpARM64RORWconst) 10961 v.AuxInt = c 10962 v.AddArg(x) 10963 return true 10964 } 10965 return false 10966 } 10967 func rewriteValueARM64_OpARM64SLL_0(v *Value) bool { 10968 // match: (SLL x (MOVDconst [c])) 10969 // cond: 10970 // result: (SLLconst x [c&63]) 10971 for { 10972 _ = v.Args[1] 10973 x := v.Args[0] 10974 v_1 := v.Args[1] 10975 if v_1.Op != OpARM64MOVDconst { 10976 break 10977 } 10978 c := v_1.AuxInt 10979 v.reset(OpARM64SLLconst) 10980 v.AuxInt = c & 63 10981 v.AddArg(x) 10982 return true 10983 } 10984 return false 10985 } 10986 func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool { 10987 // match: (SLLconst [c] (MOVDconst [d])) 10988 // cond: 10989 // result: (MOVDconst [int64(d)<<uint64(c)]) 10990 for { 10991 c := v.AuxInt 10992 v_0 := v.Args[0] 10993 if v_0.Op != OpARM64MOVDconst { 10994 break 10995 } 10996 d := v_0.AuxInt 10997 v.reset(OpARM64MOVDconst) 
10998 v.AuxInt = int64(d) << uint64(c) 10999 return true 11000 } 11001 // match: (SLLconst [c] (SRLconst [c] x)) 11002 // cond: 0 < c && c < 64 11003 // result: (ANDconst [^(1<<uint(c)-1)] x) 11004 for { 11005 c := v.AuxInt 11006 v_0 := v.Args[0] 11007 if v_0.Op != OpARM64SRLconst { 11008 break 11009 } 11010 if v_0.AuxInt != c { 11011 break 11012 } 11013 x := v_0.Args[0] 11014 if !(0 < c && c < 64) { 11015 break 11016 } 11017 v.reset(OpARM64ANDconst) 11018 v.AuxInt = ^(1<<uint(c) - 1) 11019 v.AddArg(x) 11020 return true 11021 } 11022 return false 11023 } 11024 func rewriteValueARM64_OpARM64SRA_0(v *Value) bool { 11025 // match: (SRA x (MOVDconst [c])) 11026 // cond: 11027 // result: (SRAconst x [c&63]) 11028 for { 11029 _ = v.Args[1] 11030 x := v.Args[0] 11031 v_1 := v.Args[1] 11032 if v_1.Op != OpARM64MOVDconst { 11033 break 11034 } 11035 c := v_1.AuxInt 11036 v.reset(OpARM64SRAconst) 11037 v.AuxInt = c & 63 11038 v.AddArg(x) 11039 return true 11040 } 11041 return false 11042 } 11043 func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool { 11044 // match: (SRAconst [c] (MOVDconst [d])) 11045 // cond: 11046 // result: (MOVDconst [int64(d)>>uint64(c)]) 11047 for { 11048 c := v.AuxInt 11049 v_0 := v.Args[0] 11050 if v_0.Op != OpARM64MOVDconst { 11051 break 11052 } 11053 d := v_0.AuxInt 11054 v.reset(OpARM64MOVDconst) 11055 v.AuxInt = int64(d) >> uint64(c) 11056 return true 11057 } 11058 return false 11059 } 11060 func rewriteValueARM64_OpARM64SRL_0(v *Value) bool { 11061 // match: (SRL x (MOVDconst [c])) 11062 // cond: 11063 // result: (SRLconst x [c&63]) 11064 for { 11065 _ = v.Args[1] 11066 x := v.Args[0] 11067 v_1 := v.Args[1] 11068 if v_1.Op != OpARM64MOVDconst { 11069 break 11070 } 11071 c := v_1.AuxInt 11072 v.reset(OpARM64SRLconst) 11073 v.AuxInt = c & 63 11074 v.AddArg(x) 11075 return true 11076 } 11077 return false 11078 } 11079 func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool { 11080 // match: (SRLconst [c] (MOVDconst [d])) 11081 // cond: 11082 // 
result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 11083 for { 11084 c := v.AuxInt 11085 v_0 := v.Args[0] 11086 if v_0.Op != OpARM64MOVDconst { 11087 break 11088 } 11089 d := v_0.AuxInt 11090 v.reset(OpARM64MOVDconst) 11091 v.AuxInt = int64(uint64(d) >> uint64(c)) 11092 return true 11093 } 11094 // match: (SRLconst [c] (SLLconst [c] x)) 11095 // cond: 0 < c && c < 64 11096 // result: (ANDconst [1<<uint(64-c)-1] x) 11097 for { 11098 c := v.AuxInt 11099 v_0 := v.Args[0] 11100 if v_0.Op != OpARM64SLLconst { 11101 break 11102 } 11103 if v_0.AuxInt != c { 11104 break 11105 } 11106 x := v_0.Args[0] 11107 if !(0 < c && c < 64) { 11108 break 11109 } 11110 v.reset(OpARM64ANDconst) 11111 v.AuxInt = 1<<uint(64-c) - 1 11112 v.AddArg(x) 11113 return true 11114 } 11115 return false 11116 } 11117 func rewriteValueARM64_OpARM64STP_0(v *Value) bool { 11118 b := v.Block 11119 _ = b 11120 config := b.Func.Config 11121 _ = config 11122 // match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem) 11123 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11124 // result: (STP [off1+off2] {sym} ptr val1 val2 mem) 11125 for { 11126 off1 := v.AuxInt 11127 sym := v.Aux 11128 _ = v.Args[3] 11129 v_0 := v.Args[0] 11130 if v_0.Op != OpARM64ADDconst { 11131 break 11132 } 11133 off2 := v_0.AuxInt 11134 ptr := v_0.Args[0] 11135 val1 := v.Args[1] 11136 val2 := v.Args[2] 11137 mem := v.Args[3] 11138 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11139 break 11140 } 11141 v.reset(OpARM64STP) 11142 v.AuxInt = off1 + off2 11143 v.Aux = sym 11144 v.AddArg(ptr) 11145 v.AddArg(val1) 11146 v.AddArg(val2) 11147 v.AddArg(mem) 11148 return true 11149 } 11150 // match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem) 11151 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11152 // result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem) 11153 for { 11154 off1 := v.AuxInt 11155 sym1 
:= v.Aux 11156 _ = v.Args[3] 11157 v_0 := v.Args[0] 11158 if v_0.Op != OpARM64MOVDaddr { 11159 break 11160 } 11161 off2 := v_0.AuxInt 11162 sym2 := v_0.Aux 11163 ptr := v_0.Args[0] 11164 val1 := v.Args[1] 11165 val2 := v.Args[2] 11166 mem := v.Args[3] 11167 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11168 break 11169 } 11170 v.reset(OpARM64STP) 11171 v.AuxInt = off1 + off2 11172 v.Aux = mergeSym(sym1, sym2) 11173 v.AddArg(ptr) 11174 v.AddArg(val1) 11175 v.AddArg(val2) 11176 v.AddArg(mem) 11177 return true 11178 } 11179 // match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem) 11180 // cond: 11181 // result: (MOVQstorezero [off] {sym} ptr mem) 11182 for { 11183 off := v.AuxInt 11184 sym := v.Aux 11185 _ = v.Args[3] 11186 ptr := v.Args[0] 11187 v_1 := v.Args[1] 11188 if v_1.Op != OpARM64MOVDconst { 11189 break 11190 } 11191 if v_1.AuxInt != 0 { 11192 break 11193 } 11194 v_2 := v.Args[2] 11195 if v_2.Op != OpARM64MOVDconst { 11196 break 11197 } 11198 if v_2.AuxInt != 0 { 11199 break 11200 } 11201 mem := v.Args[3] 11202 v.reset(OpARM64MOVQstorezero) 11203 v.AuxInt = off 11204 v.Aux = sym 11205 v.AddArg(ptr) 11206 v.AddArg(mem) 11207 return true 11208 } 11209 return false 11210 } 11211 func rewriteValueARM64_OpARM64SUB_0(v *Value) bool { 11212 b := v.Block 11213 _ = b 11214 // match: (SUB x (MOVDconst [c])) 11215 // cond: 11216 // result: (SUBconst [c] x) 11217 for { 11218 _ = v.Args[1] 11219 x := v.Args[0] 11220 v_1 := v.Args[1] 11221 if v_1.Op != OpARM64MOVDconst { 11222 break 11223 } 11224 c := v_1.AuxInt 11225 v.reset(OpARM64SUBconst) 11226 v.AuxInt = c 11227 v.AddArg(x) 11228 return true 11229 } 11230 // match: (SUB x x) 11231 // cond: 11232 // result: (MOVDconst [0]) 11233 for { 11234 _ = v.Args[1] 11235 x := v.Args[0] 11236 if x != v.Args[1] { 11237 break 11238 } 11239 v.reset(OpARM64MOVDconst) 11240 v.AuxInt = 0 11241 return true 11242 } 11243 // match: (SUB x (SUB y z)) 11244 // cond: 11245 // 
result: (SUB (ADD <v.Type> x z) y) 11246 for { 11247 _ = v.Args[1] 11248 x := v.Args[0] 11249 v_1 := v.Args[1] 11250 if v_1.Op != OpARM64SUB { 11251 break 11252 } 11253 _ = v_1.Args[1] 11254 y := v_1.Args[0] 11255 z := v_1.Args[1] 11256 v.reset(OpARM64SUB) 11257 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type) 11258 v0.AddArg(x) 11259 v0.AddArg(z) 11260 v.AddArg(v0) 11261 v.AddArg(y) 11262 return true 11263 } 11264 // match: (SUB (SUB x y) z) 11265 // cond: 11266 // result: (SUB x (ADD <y.Type> y z)) 11267 for { 11268 _ = v.Args[1] 11269 v_0 := v.Args[0] 11270 if v_0.Op != OpARM64SUB { 11271 break 11272 } 11273 _ = v_0.Args[1] 11274 x := v_0.Args[0] 11275 y := v_0.Args[1] 11276 z := v.Args[1] 11277 v.reset(OpARM64SUB) 11278 v.AddArg(x) 11279 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type) 11280 v0.AddArg(y) 11281 v0.AddArg(z) 11282 v.AddArg(v0) 11283 return true 11284 } 11285 // match: (SUB x0 x1:(SLLconst [c] y)) 11286 // cond: clobberIfDead(x1) 11287 // result: (SUBshiftLL x0 y [c]) 11288 for { 11289 _ = v.Args[1] 11290 x0 := v.Args[0] 11291 x1 := v.Args[1] 11292 if x1.Op != OpARM64SLLconst { 11293 break 11294 } 11295 c := x1.AuxInt 11296 y := x1.Args[0] 11297 if !(clobberIfDead(x1)) { 11298 break 11299 } 11300 v.reset(OpARM64SUBshiftLL) 11301 v.AuxInt = c 11302 v.AddArg(x0) 11303 v.AddArg(y) 11304 return true 11305 } 11306 // match: (SUB x0 x1:(SRLconst [c] y)) 11307 // cond: clobberIfDead(x1) 11308 // result: (SUBshiftRL x0 y [c]) 11309 for { 11310 _ = v.Args[1] 11311 x0 := v.Args[0] 11312 x1 := v.Args[1] 11313 if x1.Op != OpARM64SRLconst { 11314 break 11315 } 11316 c := x1.AuxInt 11317 y := x1.Args[0] 11318 if !(clobberIfDead(x1)) { 11319 break 11320 } 11321 v.reset(OpARM64SUBshiftRL) 11322 v.AuxInt = c 11323 v.AddArg(x0) 11324 v.AddArg(y) 11325 return true 11326 } 11327 // match: (SUB x0 x1:(SRAconst [c] y)) 11328 // cond: clobberIfDead(x1) 11329 // result: (SUBshiftRA x0 y [c]) 11330 for { 11331 _ = v.Args[1] 11332 x0 := v.Args[0] 11333 x1 := v.Args[1] 11334 if 
x1.Op != OpARM64SRAconst { 11335 break 11336 } 11337 c := x1.AuxInt 11338 y := x1.Args[0] 11339 if !(clobberIfDead(x1)) { 11340 break 11341 } 11342 v.reset(OpARM64SUBshiftRA) 11343 v.AuxInt = c 11344 v.AddArg(x0) 11345 v.AddArg(y) 11346 return true 11347 } 11348 return false 11349 } 11350 func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool { 11351 // match: (SUBconst [0] x) 11352 // cond: 11353 // result: x 11354 for { 11355 if v.AuxInt != 0 { 11356 break 11357 } 11358 x := v.Args[0] 11359 v.reset(OpCopy) 11360 v.Type = x.Type 11361 v.AddArg(x) 11362 return true 11363 } 11364 // match: (SUBconst [c] (MOVDconst [d])) 11365 // cond: 11366 // result: (MOVDconst [d-c]) 11367 for { 11368 c := v.AuxInt 11369 v_0 := v.Args[0] 11370 if v_0.Op != OpARM64MOVDconst { 11371 break 11372 } 11373 d := v_0.AuxInt 11374 v.reset(OpARM64MOVDconst) 11375 v.AuxInt = d - c 11376 return true 11377 } 11378 // match: (SUBconst [c] (SUBconst [d] x)) 11379 // cond: 11380 // result: (ADDconst [-c-d] x) 11381 for { 11382 c := v.AuxInt 11383 v_0 := v.Args[0] 11384 if v_0.Op != OpARM64SUBconst { 11385 break 11386 } 11387 d := v_0.AuxInt 11388 x := v_0.Args[0] 11389 v.reset(OpARM64ADDconst) 11390 v.AuxInt = -c - d 11391 v.AddArg(x) 11392 return true 11393 } 11394 // match: (SUBconst [c] (ADDconst [d] x)) 11395 // cond: 11396 // result: (ADDconst [-c+d] x) 11397 for { 11398 c := v.AuxInt 11399 v_0 := v.Args[0] 11400 if v_0.Op != OpARM64ADDconst { 11401 break 11402 } 11403 d := v_0.AuxInt 11404 x := v_0.Args[0] 11405 v.reset(OpARM64ADDconst) 11406 v.AuxInt = -c + d 11407 v.AddArg(x) 11408 return true 11409 } 11410 return false 11411 } 11412 func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool { 11413 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 11414 // cond: 11415 // result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 11416 for { 11417 d := v.AuxInt 11418 _ = v.Args[1] 11419 x := v.Args[0] 11420 v_1 := v.Args[1] 11421 if v_1.Op != OpARM64MOVDconst { 11422 break 11423 } 11424 c := 
v_1.AuxInt 11425 v.reset(OpARM64SUBconst) 11426 v.AuxInt = int64(uint64(c) << uint64(d)) 11427 v.AddArg(x) 11428 return true 11429 } 11430 // match: (SUBshiftLL x (SLLconst x [c]) [d]) 11431 // cond: c==d 11432 // result: (MOVDconst [0]) 11433 for { 11434 d := v.AuxInt 11435 _ = v.Args[1] 11436 x := v.Args[0] 11437 v_1 := v.Args[1] 11438 if v_1.Op != OpARM64SLLconst { 11439 break 11440 } 11441 c := v_1.AuxInt 11442 if x != v_1.Args[0] { 11443 break 11444 } 11445 if !(c == d) { 11446 break 11447 } 11448 v.reset(OpARM64MOVDconst) 11449 v.AuxInt = 0 11450 return true 11451 } 11452 return false 11453 } 11454 func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool { 11455 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 11456 // cond: 11457 // result: (SUBconst x [int64(int64(c)>>uint64(d))]) 11458 for { 11459 d := v.AuxInt 11460 _ = v.Args[1] 11461 x := v.Args[0] 11462 v_1 := v.Args[1] 11463 if v_1.Op != OpARM64MOVDconst { 11464 break 11465 } 11466 c := v_1.AuxInt 11467 v.reset(OpARM64SUBconst) 11468 v.AuxInt = int64(int64(c) >> uint64(d)) 11469 v.AddArg(x) 11470 return true 11471 } 11472 // match: (SUBshiftRA x (SRAconst x [c]) [d]) 11473 // cond: c==d 11474 // result: (MOVDconst [0]) 11475 for { 11476 d := v.AuxInt 11477 _ = v.Args[1] 11478 x := v.Args[0] 11479 v_1 := v.Args[1] 11480 if v_1.Op != OpARM64SRAconst { 11481 break 11482 } 11483 c := v_1.AuxInt 11484 if x != v_1.Args[0] { 11485 break 11486 } 11487 if !(c == d) { 11488 break 11489 } 11490 v.reset(OpARM64MOVDconst) 11491 v.AuxInt = 0 11492 return true 11493 } 11494 return false 11495 } 11496 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool { 11497 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 11498 // cond: 11499 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 11500 for { 11501 d := v.AuxInt 11502 _ = v.Args[1] 11503 x := v.Args[0] 11504 v_1 := v.Args[1] 11505 if v_1.Op != OpARM64MOVDconst { 11506 break 11507 } 11508 c := v_1.AuxInt 11509 v.reset(OpARM64SUBconst) 11510 v.AuxInt = int64(uint64(c) 
>> uint64(d)) 11511 v.AddArg(x) 11512 return true 11513 } 11514 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 11515 // cond: c==d 11516 // result: (MOVDconst [0]) 11517 for { 11518 d := v.AuxInt 11519 _ = v.Args[1] 11520 x := v.Args[0] 11521 v_1 := v.Args[1] 11522 if v_1.Op != OpARM64SRLconst { 11523 break 11524 } 11525 c := v_1.AuxInt 11526 if x != v_1.Args[0] { 11527 break 11528 } 11529 if !(c == d) { 11530 break 11531 } 11532 v.reset(OpARM64MOVDconst) 11533 v.AuxInt = 0 11534 return true 11535 } 11536 return false 11537 } 11538 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 11539 // match: (UDIV x (MOVDconst [1])) 11540 // cond: 11541 // result: x 11542 for { 11543 _ = v.Args[1] 11544 x := v.Args[0] 11545 v_1 := v.Args[1] 11546 if v_1.Op != OpARM64MOVDconst { 11547 break 11548 } 11549 if v_1.AuxInt != 1 { 11550 break 11551 } 11552 v.reset(OpCopy) 11553 v.Type = x.Type 11554 v.AddArg(x) 11555 return true 11556 } 11557 // match: (UDIV x (MOVDconst [c])) 11558 // cond: isPowerOfTwo(c) 11559 // result: (SRLconst [log2(c)] x) 11560 for { 11561 _ = v.Args[1] 11562 x := v.Args[0] 11563 v_1 := v.Args[1] 11564 if v_1.Op != OpARM64MOVDconst { 11565 break 11566 } 11567 c := v_1.AuxInt 11568 if !(isPowerOfTwo(c)) { 11569 break 11570 } 11571 v.reset(OpARM64SRLconst) 11572 v.AuxInt = log2(c) 11573 v.AddArg(x) 11574 return true 11575 } 11576 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 11577 // cond: 11578 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 11579 for { 11580 _ = v.Args[1] 11581 v_0 := v.Args[0] 11582 if v_0.Op != OpARM64MOVDconst { 11583 break 11584 } 11585 c := v_0.AuxInt 11586 v_1 := v.Args[1] 11587 if v_1.Op != OpARM64MOVDconst { 11588 break 11589 } 11590 d := v_1.AuxInt 11591 v.reset(OpARM64MOVDconst) 11592 v.AuxInt = int64(uint64(c) / uint64(d)) 11593 return true 11594 } 11595 return false 11596 } 11597 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 11598 // match: (UDIVW x (MOVDconst [c])) 11599 // cond: uint32(c)==1 11600 // result: x 
11601 for { 11602 _ = v.Args[1] 11603 x := v.Args[0] 11604 v_1 := v.Args[1] 11605 if v_1.Op != OpARM64MOVDconst { 11606 break 11607 } 11608 c := v_1.AuxInt 11609 if !(uint32(c) == 1) { 11610 break 11611 } 11612 v.reset(OpCopy) 11613 v.Type = x.Type 11614 v.AddArg(x) 11615 return true 11616 } 11617 // match: (UDIVW x (MOVDconst [c])) 11618 // cond: isPowerOfTwo(c) && is32Bit(c) 11619 // result: (SRLconst [log2(c)] x) 11620 for { 11621 _ = v.Args[1] 11622 x := v.Args[0] 11623 v_1 := v.Args[1] 11624 if v_1.Op != OpARM64MOVDconst { 11625 break 11626 } 11627 c := v_1.AuxInt 11628 if !(isPowerOfTwo(c) && is32Bit(c)) { 11629 break 11630 } 11631 v.reset(OpARM64SRLconst) 11632 v.AuxInt = log2(c) 11633 v.AddArg(x) 11634 return true 11635 } 11636 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 11637 // cond: 11638 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 11639 for { 11640 _ = v.Args[1] 11641 v_0 := v.Args[0] 11642 if v_0.Op != OpARM64MOVDconst { 11643 break 11644 } 11645 c := v_0.AuxInt 11646 v_1 := v.Args[1] 11647 if v_1.Op != OpARM64MOVDconst { 11648 break 11649 } 11650 d := v_1.AuxInt 11651 v.reset(OpARM64MOVDconst) 11652 v.AuxInt = int64(uint32(c) / uint32(d)) 11653 return true 11654 } 11655 return false 11656 } 11657 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 11658 // match: (UMOD _ (MOVDconst [1])) 11659 // cond: 11660 // result: (MOVDconst [0]) 11661 for { 11662 _ = v.Args[1] 11663 v_1 := v.Args[1] 11664 if v_1.Op != OpARM64MOVDconst { 11665 break 11666 } 11667 if v_1.AuxInt != 1 { 11668 break 11669 } 11670 v.reset(OpARM64MOVDconst) 11671 v.AuxInt = 0 11672 return true 11673 } 11674 // match: (UMOD x (MOVDconst [c])) 11675 // cond: isPowerOfTwo(c) 11676 // result: (ANDconst [c-1] x) 11677 for { 11678 _ = v.Args[1] 11679 x := v.Args[0] 11680 v_1 := v.Args[1] 11681 if v_1.Op != OpARM64MOVDconst { 11682 break 11683 } 11684 c := v_1.AuxInt 11685 if !(isPowerOfTwo(c)) { 11686 break 11687 } 11688 v.reset(OpARM64ANDconst) 11689 v.AuxInt = c - 1 11690 
v.AddArg(x) 11691 return true 11692 } 11693 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 11694 // cond: 11695 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 11696 for { 11697 _ = v.Args[1] 11698 v_0 := v.Args[0] 11699 if v_0.Op != OpARM64MOVDconst { 11700 break 11701 } 11702 c := v_0.AuxInt 11703 v_1 := v.Args[1] 11704 if v_1.Op != OpARM64MOVDconst { 11705 break 11706 } 11707 d := v_1.AuxInt 11708 v.reset(OpARM64MOVDconst) 11709 v.AuxInt = int64(uint64(c) % uint64(d)) 11710 return true 11711 } 11712 return false 11713 } 11714 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 11715 // match: (UMODW _ (MOVDconst [c])) 11716 // cond: uint32(c)==1 11717 // result: (MOVDconst [0]) 11718 for { 11719 _ = v.Args[1] 11720 v_1 := v.Args[1] 11721 if v_1.Op != OpARM64MOVDconst { 11722 break 11723 } 11724 c := v_1.AuxInt 11725 if !(uint32(c) == 1) { 11726 break 11727 } 11728 v.reset(OpARM64MOVDconst) 11729 v.AuxInt = 0 11730 return true 11731 } 11732 // match: (UMODW x (MOVDconst [c])) 11733 // cond: isPowerOfTwo(c) && is32Bit(c) 11734 // result: (ANDconst [c-1] x) 11735 for { 11736 _ = v.Args[1] 11737 x := v.Args[0] 11738 v_1 := v.Args[1] 11739 if v_1.Op != OpARM64MOVDconst { 11740 break 11741 } 11742 c := v_1.AuxInt 11743 if !(isPowerOfTwo(c) && is32Bit(c)) { 11744 break 11745 } 11746 v.reset(OpARM64ANDconst) 11747 v.AuxInt = c - 1 11748 v.AddArg(x) 11749 return true 11750 } 11751 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 11752 // cond: 11753 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 11754 for { 11755 _ = v.Args[1] 11756 v_0 := v.Args[0] 11757 if v_0.Op != OpARM64MOVDconst { 11758 break 11759 } 11760 c := v_0.AuxInt 11761 v_1 := v.Args[1] 11762 if v_1.Op != OpARM64MOVDconst { 11763 break 11764 } 11765 d := v_1.AuxInt 11766 v.reset(OpARM64MOVDconst) 11767 v.AuxInt = int64(uint32(c) % uint32(d)) 11768 return true 11769 } 11770 return false 11771 } 11772 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 11773 // match: (XOR x (MOVDconst [c])) 
11774 // cond: 11775 // result: (XORconst [c] x) 11776 for { 11777 _ = v.Args[1] 11778 x := v.Args[0] 11779 v_1 := v.Args[1] 11780 if v_1.Op != OpARM64MOVDconst { 11781 break 11782 } 11783 c := v_1.AuxInt 11784 v.reset(OpARM64XORconst) 11785 v.AuxInt = c 11786 v.AddArg(x) 11787 return true 11788 } 11789 // match: (XOR (MOVDconst [c]) x) 11790 // cond: 11791 // result: (XORconst [c] x) 11792 for { 11793 _ = v.Args[1] 11794 v_0 := v.Args[0] 11795 if v_0.Op != OpARM64MOVDconst { 11796 break 11797 } 11798 c := v_0.AuxInt 11799 x := v.Args[1] 11800 v.reset(OpARM64XORconst) 11801 v.AuxInt = c 11802 v.AddArg(x) 11803 return true 11804 } 11805 // match: (XOR x x) 11806 // cond: 11807 // result: (MOVDconst [0]) 11808 for { 11809 _ = v.Args[1] 11810 x := v.Args[0] 11811 if x != v.Args[1] { 11812 break 11813 } 11814 v.reset(OpARM64MOVDconst) 11815 v.AuxInt = 0 11816 return true 11817 } 11818 // match: (XOR x0 x1:(SLLconst [c] y)) 11819 // cond: clobberIfDead(x1) 11820 // result: (XORshiftLL x0 y [c]) 11821 for { 11822 _ = v.Args[1] 11823 x0 := v.Args[0] 11824 x1 := v.Args[1] 11825 if x1.Op != OpARM64SLLconst { 11826 break 11827 } 11828 c := x1.AuxInt 11829 y := x1.Args[0] 11830 if !(clobberIfDead(x1)) { 11831 break 11832 } 11833 v.reset(OpARM64XORshiftLL) 11834 v.AuxInt = c 11835 v.AddArg(x0) 11836 v.AddArg(y) 11837 return true 11838 } 11839 // match: (XOR x1:(SLLconst [c] y) x0) 11840 // cond: clobberIfDead(x1) 11841 // result: (XORshiftLL x0 y [c]) 11842 for { 11843 _ = v.Args[1] 11844 x1 := v.Args[0] 11845 if x1.Op != OpARM64SLLconst { 11846 break 11847 } 11848 c := x1.AuxInt 11849 y := x1.Args[0] 11850 x0 := v.Args[1] 11851 if !(clobberIfDead(x1)) { 11852 break 11853 } 11854 v.reset(OpARM64XORshiftLL) 11855 v.AuxInt = c 11856 v.AddArg(x0) 11857 v.AddArg(y) 11858 return true 11859 } 11860 // match: (XOR x0 x1:(SRLconst [c] y)) 11861 // cond: clobberIfDead(x1) 11862 // result: (XORshiftRL x0 y [c]) 11863 for { 11864 _ = v.Args[1] 11865 x0 := v.Args[0] 11866 x1 := v.Args[1] 
11867 if x1.Op != OpARM64SRLconst { 11868 break 11869 } 11870 c := x1.AuxInt 11871 y := x1.Args[0] 11872 if !(clobberIfDead(x1)) { 11873 break 11874 } 11875 v.reset(OpARM64XORshiftRL) 11876 v.AuxInt = c 11877 v.AddArg(x0) 11878 v.AddArg(y) 11879 return true 11880 } 11881 // match: (XOR x1:(SRLconst [c] y) x0) 11882 // cond: clobberIfDead(x1) 11883 // result: (XORshiftRL x0 y [c]) 11884 for { 11885 _ = v.Args[1] 11886 x1 := v.Args[0] 11887 if x1.Op != OpARM64SRLconst { 11888 break 11889 } 11890 c := x1.AuxInt 11891 y := x1.Args[0] 11892 x0 := v.Args[1] 11893 if !(clobberIfDead(x1)) { 11894 break 11895 } 11896 v.reset(OpARM64XORshiftRL) 11897 v.AuxInt = c 11898 v.AddArg(x0) 11899 v.AddArg(y) 11900 return true 11901 } 11902 // match: (XOR x0 x1:(SRAconst [c] y)) 11903 // cond: clobberIfDead(x1) 11904 // result: (XORshiftRA x0 y [c]) 11905 for { 11906 _ = v.Args[1] 11907 x0 := v.Args[0] 11908 x1 := v.Args[1] 11909 if x1.Op != OpARM64SRAconst { 11910 break 11911 } 11912 c := x1.AuxInt 11913 y := x1.Args[0] 11914 if !(clobberIfDead(x1)) { 11915 break 11916 } 11917 v.reset(OpARM64XORshiftRA) 11918 v.AuxInt = c 11919 v.AddArg(x0) 11920 v.AddArg(y) 11921 return true 11922 } 11923 // match: (XOR x1:(SRAconst [c] y) x0) 11924 // cond: clobberIfDead(x1) 11925 // result: (XORshiftRA x0 y [c]) 11926 for { 11927 _ = v.Args[1] 11928 x1 := v.Args[0] 11929 if x1.Op != OpARM64SRAconst { 11930 break 11931 } 11932 c := x1.AuxInt 11933 y := x1.Args[0] 11934 x0 := v.Args[1] 11935 if !(clobberIfDead(x1)) { 11936 break 11937 } 11938 v.reset(OpARM64XORshiftRA) 11939 v.AuxInt = c 11940 v.AddArg(x0) 11941 v.AddArg(y) 11942 return true 11943 } 11944 return false 11945 } 11946 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 11947 // match: (XORconst [0] x) 11948 // cond: 11949 // result: x 11950 for { 11951 if v.AuxInt != 0 { 11952 break 11953 } 11954 x := v.Args[0] 11955 v.reset(OpCopy) 11956 v.Type = x.Type 11957 v.AddArg(x) 11958 return true 11959 } 11960 // match: (XORconst [-1] 
x) 11961 // cond: 11962 // result: (MVN x) 11963 for { 11964 if v.AuxInt != -1 { 11965 break 11966 } 11967 x := v.Args[0] 11968 v.reset(OpARM64MVN) 11969 v.AddArg(x) 11970 return true 11971 } 11972 // match: (XORconst [c] (MOVDconst [d])) 11973 // cond: 11974 // result: (MOVDconst [c^d]) 11975 for { 11976 c := v.AuxInt 11977 v_0 := v.Args[0] 11978 if v_0.Op != OpARM64MOVDconst { 11979 break 11980 } 11981 d := v_0.AuxInt 11982 v.reset(OpARM64MOVDconst) 11983 v.AuxInt = c ^ d 11984 return true 11985 } 11986 // match: (XORconst [c] (XORconst [d] x)) 11987 // cond: 11988 // result: (XORconst [c^d] x) 11989 for { 11990 c := v.AuxInt 11991 v_0 := v.Args[0] 11992 if v_0.Op != OpARM64XORconst { 11993 break 11994 } 11995 d := v_0.AuxInt 11996 x := v_0.Args[0] 11997 v.reset(OpARM64XORconst) 11998 v.AuxInt = c ^ d 11999 v.AddArg(x) 12000 return true 12001 } 12002 return false 12003 } 12004 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool { 12005 b := v.Block 12006 _ = b 12007 // match: (XORshiftLL (MOVDconst [c]) x [d]) 12008 // cond: 12009 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 12010 for { 12011 d := v.AuxInt 12012 _ = v.Args[1] 12013 v_0 := v.Args[0] 12014 if v_0.Op != OpARM64MOVDconst { 12015 break 12016 } 12017 c := v_0.AuxInt 12018 x := v.Args[1] 12019 v.reset(OpARM64XORconst) 12020 v.AuxInt = c 12021 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 12022 v0.AuxInt = d 12023 v0.AddArg(x) 12024 v.AddArg(v0) 12025 return true 12026 } 12027 // match: (XORshiftLL x (MOVDconst [c]) [d]) 12028 // cond: 12029 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 12030 for { 12031 d := v.AuxInt 12032 _ = v.Args[1] 12033 x := v.Args[0] 12034 v_1 := v.Args[1] 12035 if v_1.Op != OpARM64MOVDconst { 12036 break 12037 } 12038 c := v_1.AuxInt 12039 v.reset(OpARM64XORconst) 12040 v.AuxInt = int64(uint64(c) << uint64(d)) 12041 v.AddArg(x) 12042 return true 12043 } 12044 // match: (XORshiftLL x (SLLconst x [c]) [d]) 12045 // cond: c==d 12046 // result: (MOVDconst 
[0]) 12047 for { 12048 d := v.AuxInt 12049 _ = v.Args[1] 12050 x := v.Args[0] 12051 v_1 := v.Args[1] 12052 if v_1.Op != OpARM64SLLconst { 12053 break 12054 } 12055 c := v_1.AuxInt 12056 if x != v_1.Args[0] { 12057 break 12058 } 12059 if !(c == d) { 12060 break 12061 } 12062 v.reset(OpARM64MOVDconst) 12063 v.AuxInt = 0 12064 return true 12065 } 12066 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 12067 // cond: 12068 // result: (RORconst [64-c] x) 12069 for { 12070 c := v.AuxInt 12071 _ = v.Args[1] 12072 v_0 := v.Args[0] 12073 if v_0.Op != OpARM64SRLconst { 12074 break 12075 } 12076 if v_0.AuxInt != 64-c { 12077 break 12078 } 12079 x := v_0.Args[0] 12080 if x != v.Args[1] { 12081 break 12082 } 12083 v.reset(OpARM64RORconst) 12084 v.AuxInt = 64 - c 12085 v.AddArg(x) 12086 return true 12087 } 12088 // match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 12089 // cond: c < 32 && t.Size() == 4 12090 // result: (RORWconst [32-c] x) 12091 for { 12092 t := v.Type 12093 c := v.AuxInt 12094 _ = v.Args[1] 12095 v_0 := v.Args[0] 12096 if v_0.Op != OpARM64SRLconst { 12097 break 12098 } 12099 if v_0.AuxInt != 32-c { 12100 break 12101 } 12102 v_0_0 := v_0.Args[0] 12103 if v_0_0.Op != OpARM64MOVWUreg { 12104 break 12105 } 12106 x := v_0_0.Args[0] 12107 if x != v.Args[1] { 12108 break 12109 } 12110 if !(c < 32 && t.Size() == 4) { 12111 break 12112 } 12113 v.reset(OpARM64RORWconst) 12114 v.AuxInt = 32 - c 12115 v.AddArg(x) 12116 return true 12117 } 12118 return false 12119 } 12120 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 12121 b := v.Block 12122 _ = b 12123 // match: (XORshiftRA (MOVDconst [c]) x [d]) 12124 // cond: 12125 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 12126 for { 12127 d := v.AuxInt 12128 _ = v.Args[1] 12129 v_0 := v.Args[0] 12130 if v_0.Op != OpARM64MOVDconst { 12131 break 12132 } 12133 c := v_0.AuxInt 12134 x := v.Args[1] 12135 v.reset(OpARM64XORconst) 12136 v.AuxInt = c 12137 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 
12138 v0.AuxInt = d 12139 v0.AddArg(x) 12140 v.AddArg(v0) 12141 return true 12142 } 12143 // match: (XORshiftRA x (MOVDconst [c]) [d]) 12144 // cond: 12145 // result: (XORconst x [int64(int64(c)>>uint64(d))]) 12146 for { 12147 d := v.AuxInt 12148 _ = v.Args[1] 12149 x := v.Args[0] 12150 v_1 := v.Args[1] 12151 if v_1.Op != OpARM64MOVDconst { 12152 break 12153 } 12154 c := v_1.AuxInt 12155 v.reset(OpARM64XORconst) 12156 v.AuxInt = int64(int64(c) >> uint64(d)) 12157 v.AddArg(x) 12158 return true 12159 } 12160 // match: (XORshiftRA x (SRAconst x [c]) [d]) 12161 // cond: c==d 12162 // result: (MOVDconst [0]) 12163 for { 12164 d := v.AuxInt 12165 _ = v.Args[1] 12166 x := v.Args[0] 12167 v_1 := v.Args[1] 12168 if v_1.Op != OpARM64SRAconst { 12169 break 12170 } 12171 c := v_1.AuxInt 12172 if x != v_1.Args[0] { 12173 break 12174 } 12175 if !(c == d) { 12176 break 12177 } 12178 v.reset(OpARM64MOVDconst) 12179 v.AuxInt = 0 12180 return true 12181 } 12182 return false 12183 } 12184 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 12185 b := v.Block 12186 _ = b 12187 // match: (XORshiftRL (MOVDconst [c]) x [d]) 12188 // cond: 12189 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 12190 for { 12191 d := v.AuxInt 12192 _ = v.Args[1] 12193 v_0 := v.Args[0] 12194 if v_0.Op != OpARM64MOVDconst { 12195 break 12196 } 12197 c := v_0.AuxInt 12198 x := v.Args[1] 12199 v.reset(OpARM64XORconst) 12200 v.AuxInt = c 12201 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 12202 v0.AuxInt = d 12203 v0.AddArg(x) 12204 v.AddArg(v0) 12205 return true 12206 } 12207 // match: (XORshiftRL x (MOVDconst [c]) [d]) 12208 // cond: 12209 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 12210 for { 12211 d := v.AuxInt 12212 _ = v.Args[1] 12213 x := v.Args[0] 12214 v_1 := v.Args[1] 12215 if v_1.Op != OpARM64MOVDconst { 12216 break 12217 } 12218 c := v_1.AuxInt 12219 v.reset(OpARM64XORconst) 12220 v.AuxInt = int64(uint64(c) >> uint64(d)) 12221 v.AddArg(x) 12222 return true 12223 } 12224 // 
match: (XORshiftRL x (SRLconst x [c]) [d]) 12225 // cond: c==d 12226 // result: (MOVDconst [0]) 12227 for { 12228 d := v.AuxInt 12229 _ = v.Args[1] 12230 x := v.Args[0] 12231 v_1 := v.Args[1] 12232 if v_1.Op != OpARM64SRLconst { 12233 break 12234 } 12235 c := v_1.AuxInt 12236 if x != v_1.Args[0] { 12237 break 12238 } 12239 if !(c == d) { 12240 break 12241 } 12242 v.reset(OpARM64MOVDconst) 12243 v.AuxInt = 0 12244 return true 12245 } 12246 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 12247 // cond: 12248 // result: (RORconst [ c] x) 12249 for { 12250 c := v.AuxInt 12251 _ = v.Args[1] 12252 v_0 := v.Args[0] 12253 if v_0.Op != OpARM64SLLconst { 12254 break 12255 } 12256 if v_0.AuxInt != 64-c { 12257 break 12258 } 12259 x := v_0.Args[0] 12260 if x != v.Args[1] { 12261 break 12262 } 12263 v.reset(OpARM64RORconst) 12264 v.AuxInt = c 12265 v.AddArg(x) 12266 return true 12267 } 12268 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 12269 // cond: c < 32 && t.Size() == 4 12270 // result: (RORWconst [ c] x) 12271 for { 12272 t := v.Type 12273 c := v.AuxInt 12274 _ = v.Args[1] 12275 v_0 := v.Args[0] 12276 if v_0.Op != OpARM64SLLconst { 12277 break 12278 } 12279 if v_0.AuxInt != 32-c { 12280 break 12281 } 12282 x := v_0.Args[0] 12283 v_1 := v.Args[1] 12284 if v_1.Op != OpARM64MOVWUreg { 12285 break 12286 } 12287 if x != v_1.Args[0] { 12288 break 12289 } 12290 if !(c < 32 && t.Size() == 4) { 12291 break 12292 } 12293 v.reset(OpARM64RORWconst) 12294 v.AuxInt = c 12295 v.AddArg(x) 12296 return true 12297 } 12298 return false 12299 } 12300 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 12301 // match: (Add16 x y) 12302 // cond: 12303 // result: (ADD x y) 12304 for { 12305 _ = v.Args[1] 12306 x := v.Args[0] 12307 y := v.Args[1] 12308 v.reset(OpARM64ADD) 12309 v.AddArg(x) 12310 v.AddArg(y) 12311 return true 12312 } 12313 } 12314 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 12315 // match: (Add32 x y) 12316 // cond: 12317 // result: (ADD x y) 12318 for { 
12319 _ = v.Args[1] 12320 x := v.Args[0] 12321 y := v.Args[1] 12322 v.reset(OpARM64ADD) 12323 v.AddArg(x) 12324 v.AddArg(y) 12325 return true 12326 } 12327 } 12328 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 12329 // match: (Add32F x y) 12330 // cond: 12331 // result: (FADDS x y) 12332 for { 12333 _ = v.Args[1] 12334 x := v.Args[0] 12335 y := v.Args[1] 12336 v.reset(OpARM64FADDS) 12337 v.AddArg(x) 12338 v.AddArg(y) 12339 return true 12340 } 12341 } 12342 func rewriteValueARM64_OpAdd64_0(v *Value) bool { 12343 // match: (Add64 x y) 12344 // cond: 12345 // result: (ADD x y) 12346 for { 12347 _ = v.Args[1] 12348 x := v.Args[0] 12349 y := v.Args[1] 12350 v.reset(OpARM64ADD) 12351 v.AddArg(x) 12352 v.AddArg(y) 12353 return true 12354 } 12355 } 12356 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 12357 // match: (Add64F x y) 12358 // cond: 12359 // result: (FADDD x y) 12360 for { 12361 _ = v.Args[1] 12362 x := v.Args[0] 12363 y := v.Args[1] 12364 v.reset(OpARM64FADDD) 12365 v.AddArg(x) 12366 v.AddArg(y) 12367 return true 12368 } 12369 } 12370 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 12371 // match: (Add8 x y) 12372 // cond: 12373 // result: (ADD x y) 12374 for { 12375 _ = v.Args[1] 12376 x := v.Args[0] 12377 y := v.Args[1] 12378 v.reset(OpARM64ADD) 12379 v.AddArg(x) 12380 v.AddArg(y) 12381 return true 12382 } 12383 } 12384 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 12385 // match: (AddPtr x y) 12386 // cond: 12387 // result: (ADD x y) 12388 for { 12389 _ = v.Args[1] 12390 x := v.Args[0] 12391 y := v.Args[1] 12392 v.reset(OpARM64ADD) 12393 v.AddArg(x) 12394 v.AddArg(y) 12395 return true 12396 } 12397 } 12398 func rewriteValueARM64_OpAddr_0(v *Value) bool { 12399 // match: (Addr {sym} base) 12400 // cond: 12401 // result: (MOVDaddr {sym} base) 12402 for { 12403 sym := v.Aux 12404 base := v.Args[0] 12405 v.reset(OpARM64MOVDaddr) 12406 v.Aux = sym 12407 v.AddArg(base) 12408 return true 12409 } 12410 } 12411 func rewriteValueARM64_OpAnd16_0(v 
*Value) bool { 12412 // match: (And16 x y) 12413 // cond: 12414 // result: (AND x y) 12415 for { 12416 _ = v.Args[1] 12417 x := v.Args[0] 12418 y := v.Args[1] 12419 v.reset(OpARM64AND) 12420 v.AddArg(x) 12421 v.AddArg(y) 12422 return true 12423 } 12424 } 12425 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 12426 // match: (And32 x y) 12427 // cond: 12428 // result: (AND x y) 12429 for { 12430 _ = v.Args[1] 12431 x := v.Args[0] 12432 y := v.Args[1] 12433 v.reset(OpARM64AND) 12434 v.AddArg(x) 12435 v.AddArg(y) 12436 return true 12437 } 12438 } 12439 func rewriteValueARM64_OpAnd64_0(v *Value) bool { 12440 // match: (And64 x y) 12441 // cond: 12442 // result: (AND x y) 12443 for { 12444 _ = v.Args[1] 12445 x := v.Args[0] 12446 y := v.Args[1] 12447 v.reset(OpARM64AND) 12448 v.AddArg(x) 12449 v.AddArg(y) 12450 return true 12451 } 12452 } 12453 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 12454 // match: (And8 x y) 12455 // cond: 12456 // result: (AND x y) 12457 for { 12458 _ = v.Args[1] 12459 x := v.Args[0] 12460 y := v.Args[1] 12461 v.reset(OpARM64AND) 12462 v.AddArg(x) 12463 v.AddArg(y) 12464 return true 12465 } 12466 } 12467 func rewriteValueARM64_OpAndB_0(v *Value) bool { 12468 // match: (AndB x y) 12469 // cond: 12470 // result: (AND x y) 12471 for { 12472 _ = v.Args[1] 12473 x := v.Args[0] 12474 y := v.Args[1] 12475 v.reset(OpARM64AND) 12476 v.AddArg(x) 12477 v.AddArg(y) 12478 return true 12479 } 12480 } 12481 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 12482 // match: (AtomicAdd32 ptr val mem) 12483 // cond: 12484 // result: (LoweredAtomicAdd32 ptr val mem) 12485 for { 12486 _ = v.Args[2] 12487 ptr := v.Args[0] 12488 val := v.Args[1] 12489 mem := v.Args[2] 12490 v.reset(OpARM64LoweredAtomicAdd32) 12491 v.AddArg(ptr) 12492 v.AddArg(val) 12493 v.AddArg(mem) 12494 return true 12495 } 12496 } 12497 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 12498 // match: (AtomicAdd64 ptr val mem) 12499 // cond: 12500 // result: (LoweredAtomicAdd64 ptr 
val mem) 12501 for { 12502 _ = v.Args[2] 12503 ptr := v.Args[0] 12504 val := v.Args[1] 12505 mem := v.Args[2] 12506 v.reset(OpARM64LoweredAtomicAdd64) 12507 v.AddArg(ptr) 12508 v.AddArg(val) 12509 v.AddArg(mem) 12510 return true 12511 } 12512 } 12513 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 12514 // match: (AtomicAnd8 ptr val mem) 12515 // cond: 12516 // result: (LoweredAtomicAnd8 ptr val mem) 12517 for { 12518 _ = v.Args[2] 12519 ptr := v.Args[0] 12520 val := v.Args[1] 12521 mem := v.Args[2] 12522 v.reset(OpARM64LoweredAtomicAnd8) 12523 v.AddArg(ptr) 12524 v.AddArg(val) 12525 v.AddArg(mem) 12526 return true 12527 } 12528 } 12529 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 12530 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 12531 // cond: 12532 // result: (LoweredAtomicCas32 ptr old new_ mem) 12533 for { 12534 _ = v.Args[3] 12535 ptr := v.Args[0] 12536 old := v.Args[1] 12537 new_ := v.Args[2] 12538 mem := v.Args[3] 12539 v.reset(OpARM64LoweredAtomicCas32) 12540 v.AddArg(ptr) 12541 v.AddArg(old) 12542 v.AddArg(new_) 12543 v.AddArg(mem) 12544 return true 12545 } 12546 } 12547 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 12548 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 12549 // cond: 12550 // result: (LoweredAtomicCas64 ptr old new_ mem) 12551 for { 12552 _ = v.Args[3] 12553 ptr := v.Args[0] 12554 old := v.Args[1] 12555 new_ := v.Args[2] 12556 mem := v.Args[3] 12557 v.reset(OpARM64LoweredAtomicCas64) 12558 v.AddArg(ptr) 12559 v.AddArg(old) 12560 v.AddArg(new_) 12561 v.AddArg(mem) 12562 return true 12563 } 12564 } 12565 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool { 12566 // match: (AtomicExchange32 ptr val mem) 12567 // cond: 12568 // result: (LoweredAtomicExchange32 ptr val mem) 12569 for { 12570 _ = v.Args[2] 12571 ptr := v.Args[0] 12572 val := v.Args[1] 12573 mem := v.Args[2] 12574 v.reset(OpARM64LoweredAtomicExchange32) 12575 v.AddArg(ptr) 12576 v.AddArg(val) 12577 
v.AddArg(mem) 12578 return true 12579 } 12580 } 12581 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool { 12582 // match: (AtomicExchange64 ptr val mem) 12583 // cond: 12584 // result: (LoweredAtomicExchange64 ptr val mem) 12585 for { 12586 _ = v.Args[2] 12587 ptr := v.Args[0] 12588 val := v.Args[1] 12589 mem := v.Args[2] 12590 v.reset(OpARM64LoweredAtomicExchange64) 12591 v.AddArg(ptr) 12592 v.AddArg(val) 12593 v.AddArg(mem) 12594 return true 12595 } 12596 } 12597 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool { 12598 // match: (AtomicLoad32 ptr mem) 12599 // cond: 12600 // result: (LDARW ptr mem) 12601 for { 12602 _ = v.Args[1] 12603 ptr := v.Args[0] 12604 mem := v.Args[1] 12605 v.reset(OpARM64LDARW) 12606 v.AddArg(ptr) 12607 v.AddArg(mem) 12608 return true 12609 } 12610 } 12611 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool { 12612 // match: (AtomicLoad64 ptr mem) 12613 // cond: 12614 // result: (LDAR ptr mem) 12615 for { 12616 _ = v.Args[1] 12617 ptr := v.Args[0] 12618 mem := v.Args[1] 12619 v.reset(OpARM64LDAR) 12620 v.AddArg(ptr) 12621 v.AddArg(mem) 12622 return true 12623 } 12624 } 12625 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool { 12626 // match: (AtomicLoadPtr ptr mem) 12627 // cond: 12628 // result: (LDAR ptr mem) 12629 for { 12630 _ = v.Args[1] 12631 ptr := v.Args[0] 12632 mem := v.Args[1] 12633 v.reset(OpARM64LDAR) 12634 v.AddArg(ptr) 12635 v.AddArg(mem) 12636 return true 12637 } 12638 } 12639 func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool { 12640 // match: (AtomicOr8 ptr val mem) 12641 // cond: 12642 // result: (LoweredAtomicOr8 ptr val mem) 12643 for { 12644 _ = v.Args[2] 12645 ptr := v.Args[0] 12646 val := v.Args[1] 12647 mem := v.Args[2] 12648 v.reset(OpARM64LoweredAtomicOr8) 12649 v.AddArg(ptr) 12650 v.AddArg(val) 12651 v.AddArg(mem) 12652 return true 12653 } 12654 } 12655 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool { 12656 // match: (AtomicStore32 ptr val mem) 12657 // cond: 12658 // result: 
(STLRW ptr val mem) 12659 for { 12660 _ = v.Args[2] 12661 ptr := v.Args[0] 12662 val := v.Args[1] 12663 mem := v.Args[2] 12664 v.reset(OpARM64STLRW) 12665 v.AddArg(ptr) 12666 v.AddArg(val) 12667 v.AddArg(mem) 12668 return true 12669 } 12670 } 12671 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool { 12672 // match: (AtomicStore64 ptr val mem) 12673 // cond: 12674 // result: (STLR ptr val mem) 12675 for { 12676 _ = v.Args[2] 12677 ptr := v.Args[0] 12678 val := v.Args[1] 12679 mem := v.Args[2] 12680 v.reset(OpARM64STLR) 12681 v.AddArg(ptr) 12682 v.AddArg(val) 12683 v.AddArg(mem) 12684 return true 12685 } 12686 } 12687 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool { 12688 // match: (AtomicStorePtrNoWB ptr val mem) 12689 // cond: 12690 // result: (STLR ptr val mem) 12691 for { 12692 _ = v.Args[2] 12693 ptr := v.Args[0] 12694 val := v.Args[1] 12695 mem := v.Args[2] 12696 v.reset(OpARM64STLR) 12697 v.AddArg(ptr) 12698 v.AddArg(val) 12699 v.AddArg(mem) 12700 return true 12701 } 12702 } 12703 func rewriteValueARM64_OpAvg64u_0(v *Value) bool { 12704 b := v.Block 12705 _ = b 12706 // match: (Avg64u <t> x y) 12707 // cond: 12708 // result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y) 12709 for { 12710 t := v.Type 12711 _ = v.Args[1] 12712 x := v.Args[0] 12713 y := v.Args[1] 12714 v.reset(OpARM64ADD) 12715 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 12716 v0.AuxInt = 1 12717 v1 := b.NewValue0(v.Pos, OpARM64SUB, t) 12718 v1.AddArg(x) 12719 v1.AddArg(y) 12720 v0.AddArg(v1) 12721 v.AddArg(v0) 12722 v.AddArg(y) 12723 return true 12724 } 12725 } 12726 func rewriteValueARM64_OpBitLen64_0(v *Value) bool { 12727 b := v.Block 12728 _ = b 12729 typ := &b.Func.Config.Types 12730 _ = typ 12731 // match: (BitLen64 x) 12732 // cond: 12733 // result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x)) 12734 for { 12735 x := v.Args[0] 12736 v.reset(OpARM64SUB) 12737 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 12738 v0.AuxInt = 64 12739 v.AddArg(v0) 12740 v1 := 
b.NewValue0(v.Pos, OpARM64CLZ, typ.Int) 12741 v1.AddArg(x) 12742 v.AddArg(v1) 12743 return true 12744 } 12745 } 12746 func rewriteValueARM64_OpBitRev16_0(v *Value) bool { 12747 b := v.Block 12748 _ = b 12749 typ := &b.Func.Config.Types 12750 _ = typ 12751 // match: (BitRev16 x) 12752 // cond: 12753 // result: (SRLconst [48] (RBIT <typ.UInt64> x)) 12754 for { 12755 x := v.Args[0] 12756 v.reset(OpARM64SRLconst) 12757 v.AuxInt = 48 12758 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 12759 v0.AddArg(x) 12760 v.AddArg(v0) 12761 return true 12762 } 12763 } 12764 func rewriteValueARM64_OpBitRev32_0(v *Value) bool { 12765 // match: (BitRev32 x) 12766 // cond: 12767 // result: (RBITW x) 12768 for { 12769 x := v.Args[0] 12770 v.reset(OpARM64RBITW) 12771 v.AddArg(x) 12772 return true 12773 } 12774 } 12775 func rewriteValueARM64_OpBitRev64_0(v *Value) bool { 12776 // match: (BitRev64 x) 12777 // cond: 12778 // result: (RBIT x) 12779 for { 12780 x := v.Args[0] 12781 v.reset(OpARM64RBIT) 12782 v.AddArg(x) 12783 return true 12784 } 12785 } 12786 func rewriteValueARM64_OpBitRev8_0(v *Value) bool { 12787 b := v.Block 12788 _ = b 12789 typ := &b.Func.Config.Types 12790 _ = typ 12791 // match: (BitRev8 x) 12792 // cond: 12793 // result: (SRLconst [56] (RBIT <typ.UInt64> x)) 12794 for { 12795 x := v.Args[0] 12796 v.reset(OpARM64SRLconst) 12797 v.AuxInt = 56 12798 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 12799 v0.AddArg(x) 12800 v.AddArg(v0) 12801 return true 12802 } 12803 } 12804 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 12805 // match: (Bswap32 x) 12806 // cond: 12807 // result: (REVW x) 12808 for { 12809 x := v.Args[0] 12810 v.reset(OpARM64REVW) 12811 v.AddArg(x) 12812 return true 12813 } 12814 } 12815 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 12816 // match: (Bswap64 x) 12817 // cond: 12818 // result: (REV x) 12819 for { 12820 x := v.Args[0] 12821 v.reset(OpARM64REV) 12822 v.AddArg(x) 12823 return true 12824 } 12825 } 12826 func 
rewriteValueARM64_OpCeil_0(v *Value) bool { 12827 // match: (Ceil x) 12828 // cond: 12829 // result: (FRINTPD x) 12830 for { 12831 x := v.Args[0] 12832 v.reset(OpARM64FRINTPD) 12833 v.AddArg(x) 12834 return true 12835 } 12836 } 12837 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 12838 // match: (ClosureCall [argwid] entry closure mem) 12839 // cond: 12840 // result: (CALLclosure [argwid] entry closure mem) 12841 for { 12842 argwid := v.AuxInt 12843 _ = v.Args[2] 12844 entry := v.Args[0] 12845 closure := v.Args[1] 12846 mem := v.Args[2] 12847 v.reset(OpARM64CALLclosure) 12848 v.AuxInt = argwid 12849 v.AddArg(entry) 12850 v.AddArg(closure) 12851 v.AddArg(mem) 12852 return true 12853 } 12854 } 12855 func rewriteValueARM64_OpCom16_0(v *Value) bool { 12856 // match: (Com16 x) 12857 // cond: 12858 // result: (MVN x) 12859 for { 12860 x := v.Args[0] 12861 v.reset(OpARM64MVN) 12862 v.AddArg(x) 12863 return true 12864 } 12865 } 12866 func rewriteValueARM64_OpCom32_0(v *Value) bool { 12867 // match: (Com32 x) 12868 // cond: 12869 // result: (MVN x) 12870 for { 12871 x := v.Args[0] 12872 v.reset(OpARM64MVN) 12873 v.AddArg(x) 12874 return true 12875 } 12876 } 12877 func rewriteValueARM64_OpCom64_0(v *Value) bool { 12878 // match: (Com64 x) 12879 // cond: 12880 // result: (MVN x) 12881 for { 12882 x := v.Args[0] 12883 v.reset(OpARM64MVN) 12884 v.AddArg(x) 12885 return true 12886 } 12887 } 12888 func rewriteValueARM64_OpCom8_0(v *Value) bool { 12889 // match: (Com8 x) 12890 // cond: 12891 // result: (MVN x) 12892 for { 12893 x := v.Args[0] 12894 v.reset(OpARM64MVN) 12895 v.AddArg(x) 12896 return true 12897 } 12898 } 12899 func rewriteValueARM64_OpCondSelect_0(v *Value) bool { 12900 b := v.Block 12901 _ = b 12902 // match: (CondSelect x y bool) 12903 // cond: flagArg(bool) != nil 12904 // result: (CSEL {bool.Op} x y flagArg(bool)) 12905 for { 12906 _ = v.Args[2] 12907 x := v.Args[0] 12908 y := v.Args[1] 12909 bool := v.Args[2] 12910 if !(flagArg(bool) != nil) { 12911 
break 12912 } 12913 v.reset(OpARM64CSEL) 12914 v.Aux = bool.Op 12915 v.AddArg(x) 12916 v.AddArg(y) 12917 v.AddArg(flagArg(bool)) 12918 return true 12919 } 12920 // match: (CondSelect x y bool) 12921 // cond: flagArg(bool) == nil 12922 // result: (CSEL {OpARM64NotEqual} x y (CMPWconst [0] bool)) 12923 for { 12924 _ = v.Args[2] 12925 x := v.Args[0] 12926 y := v.Args[1] 12927 bool := v.Args[2] 12928 if !(flagArg(bool) == nil) { 12929 break 12930 } 12931 v.reset(OpARM64CSEL) 12932 v.Aux = OpARM64NotEqual 12933 v.AddArg(x) 12934 v.AddArg(y) 12935 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 12936 v0.AuxInt = 0 12937 v0.AddArg(bool) 12938 v.AddArg(v0) 12939 return true 12940 } 12941 return false 12942 } 12943 func rewriteValueARM64_OpConst16_0(v *Value) bool { 12944 // match: (Const16 [val]) 12945 // cond: 12946 // result: (MOVDconst [val]) 12947 for { 12948 val := v.AuxInt 12949 v.reset(OpARM64MOVDconst) 12950 v.AuxInt = val 12951 return true 12952 } 12953 } 12954 func rewriteValueARM64_OpConst32_0(v *Value) bool { 12955 // match: (Const32 [val]) 12956 // cond: 12957 // result: (MOVDconst [val]) 12958 for { 12959 val := v.AuxInt 12960 v.reset(OpARM64MOVDconst) 12961 v.AuxInt = val 12962 return true 12963 } 12964 } 12965 func rewriteValueARM64_OpConst32F_0(v *Value) bool { 12966 // match: (Const32F [val]) 12967 // cond: 12968 // result: (FMOVSconst [val]) 12969 for { 12970 val := v.AuxInt 12971 v.reset(OpARM64FMOVSconst) 12972 v.AuxInt = val 12973 return true 12974 } 12975 } 12976 func rewriteValueARM64_OpConst64_0(v *Value) bool { 12977 // match: (Const64 [val]) 12978 // cond: 12979 // result: (MOVDconst [val]) 12980 for { 12981 val := v.AuxInt 12982 v.reset(OpARM64MOVDconst) 12983 v.AuxInt = val 12984 return true 12985 } 12986 } 12987 func rewriteValueARM64_OpConst64F_0(v *Value) bool { 12988 // match: (Const64F [val]) 12989 // cond: 12990 // result: (FMOVDconst [val]) 12991 for { 12992 val := v.AuxInt 12993 v.reset(OpARM64FMOVDconst) 12994 v.AuxInt = 
val 12995 return true 12996 } 12997 } 12998 func rewriteValueARM64_OpConst8_0(v *Value) bool { 12999 // match: (Const8 [val]) 13000 // cond: 13001 // result: (MOVDconst [val]) 13002 for { 13003 val := v.AuxInt 13004 v.reset(OpARM64MOVDconst) 13005 v.AuxInt = val 13006 return true 13007 } 13008 } 13009 func rewriteValueARM64_OpConstBool_0(v *Value) bool { 13010 // match: (ConstBool [b]) 13011 // cond: 13012 // result: (MOVDconst [b]) 13013 for { 13014 b := v.AuxInt 13015 v.reset(OpARM64MOVDconst) 13016 v.AuxInt = b 13017 return true 13018 } 13019 } 13020 func rewriteValueARM64_OpConstNil_0(v *Value) bool { 13021 // match: (ConstNil) 13022 // cond: 13023 // result: (MOVDconst [0]) 13024 for { 13025 v.reset(OpARM64MOVDconst) 13026 v.AuxInt = 0 13027 return true 13028 } 13029 } 13030 func rewriteValueARM64_OpConvert_0(v *Value) bool { 13031 // match: (Convert x mem) 13032 // cond: 13033 // result: (MOVDconvert x mem) 13034 for { 13035 _ = v.Args[1] 13036 x := v.Args[0] 13037 mem := v.Args[1] 13038 v.reset(OpARM64MOVDconvert) 13039 v.AddArg(x) 13040 v.AddArg(mem) 13041 return true 13042 } 13043 } 13044 func rewriteValueARM64_OpCtz32_0(v *Value) bool { 13045 b := v.Block 13046 _ = b 13047 // match: (Ctz32 <t> x) 13048 // cond: 13049 // result: (CLZW (RBITW <t> x)) 13050 for { 13051 t := v.Type 13052 x := v.Args[0] 13053 v.reset(OpARM64CLZW) 13054 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 13055 v0.AddArg(x) 13056 v.AddArg(v0) 13057 return true 13058 } 13059 } 13060 func rewriteValueARM64_OpCtz64_0(v *Value) bool { 13061 b := v.Block 13062 _ = b 13063 // match: (Ctz64 <t> x) 13064 // cond: 13065 // result: (CLZ (RBIT <t> x)) 13066 for { 13067 t := v.Type 13068 x := v.Args[0] 13069 v.reset(OpARM64CLZ) 13070 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 13071 v0.AddArg(x) 13072 v.AddArg(v0) 13073 return true 13074 } 13075 } 13076 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool { 13077 // match: (Cvt32Fto32 x) 13078 // cond: 13079 // result: (FCVTZSSW x) 13080 for { 13081 x 
:= v.Args[0] 13082 v.reset(OpARM64FCVTZSSW) 13083 v.AddArg(x) 13084 return true 13085 } 13086 } 13087 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool { 13088 // match: (Cvt32Fto32U x) 13089 // cond: 13090 // result: (FCVTZUSW x) 13091 for { 13092 x := v.Args[0] 13093 v.reset(OpARM64FCVTZUSW) 13094 v.AddArg(x) 13095 return true 13096 } 13097 } 13098 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool { 13099 // match: (Cvt32Fto64 x) 13100 // cond: 13101 // result: (FCVTZSS x) 13102 for { 13103 x := v.Args[0] 13104 v.reset(OpARM64FCVTZSS) 13105 v.AddArg(x) 13106 return true 13107 } 13108 } 13109 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool { 13110 // match: (Cvt32Fto64F x) 13111 // cond: 13112 // result: (FCVTSD x) 13113 for { 13114 x := v.Args[0] 13115 v.reset(OpARM64FCVTSD) 13116 v.AddArg(x) 13117 return true 13118 } 13119 } 13120 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool { 13121 // match: (Cvt32Fto64U x) 13122 // cond: 13123 // result: (FCVTZUS x) 13124 for { 13125 x := v.Args[0] 13126 v.reset(OpARM64FCVTZUS) 13127 v.AddArg(x) 13128 return true 13129 } 13130 } 13131 func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool { 13132 // match: (Cvt32Uto32F x) 13133 // cond: 13134 // result: (UCVTFWS x) 13135 for { 13136 x := v.Args[0] 13137 v.reset(OpARM64UCVTFWS) 13138 v.AddArg(x) 13139 return true 13140 } 13141 } 13142 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool { 13143 // match: (Cvt32Uto64F x) 13144 // cond: 13145 // result: (UCVTFWD x) 13146 for { 13147 x := v.Args[0] 13148 v.reset(OpARM64UCVTFWD) 13149 v.AddArg(x) 13150 return true 13151 } 13152 } 13153 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool { 13154 // match: (Cvt32to32F x) 13155 // cond: 13156 // result: (SCVTFWS x) 13157 for { 13158 x := v.Args[0] 13159 v.reset(OpARM64SCVTFWS) 13160 v.AddArg(x) 13161 return true 13162 } 13163 } 13164 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool { 13165 // match: (Cvt32to64F x) 13166 // cond: 13167 // result: (SCVTFWD x) 
13168 for { 13169 x := v.Args[0] 13170 v.reset(OpARM64SCVTFWD) 13171 v.AddArg(x) 13172 return true 13173 } 13174 } 13175 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool { 13176 // match: (Cvt64Fto32 x) 13177 // cond: 13178 // result: (FCVTZSDW x) 13179 for { 13180 x := v.Args[0] 13181 v.reset(OpARM64FCVTZSDW) 13182 v.AddArg(x) 13183 return true 13184 } 13185 } 13186 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool { 13187 // match: (Cvt64Fto32F x) 13188 // cond: 13189 // result: (FCVTDS x) 13190 for { 13191 x := v.Args[0] 13192 v.reset(OpARM64FCVTDS) 13193 v.AddArg(x) 13194 return true 13195 } 13196 } 13197 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool { 13198 // match: (Cvt64Fto32U x) 13199 // cond: 13200 // result: (FCVTZUDW x) 13201 for { 13202 x := v.Args[0] 13203 v.reset(OpARM64FCVTZUDW) 13204 v.AddArg(x) 13205 return true 13206 } 13207 } 13208 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool { 13209 // match: (Cvt64Fto64 x) 13210 // cond: 13211 // result: (FCVTZSD x) 13212 for { 13213 x := v.Args[0] 13214 v.reset(OpARM64FCVTZSD) 13215 v.AddArg(x) 13216 return true 13217 } 13218 } 13219 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool { 13220 // match: (Cvt64Fto64U x) 13221 // cond: 13222 // result: (FCVTZUD x) 13223 for { 13224 x := v.Args[0] 13225 v.reset(OpARM64FCVTZUD) 13226 v.AddArg(x) 13227 return true 13228 } 13229 } 13230 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool { 13231 // match: (Cvt64Uto32F x) 13232 // cond: 13233 // result: (UCVTFS x) 13234 for { 13235 x := v.Args[0] 13236 v.reset(OpARM64UCVTFS) 13237 v.AddArg(x) 13238 return true 13239 } 13240 } 13241 func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool { 13242 // match: (Cvt64Uto64F x) 13243 // cond: 13244 // result: (UCVTFD x) 13245 for { 13246 x := v.Args[0] 13247 v.reset(OpARM64UCVTFD) 13248 v.AddArg(x) 13249 return true 13250 } 13251 } 13252 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool { 13253 // match: (Cvt64to32F x) 13254 // cond: 13255 // 
result: (SCVTFS x) 13256 for { 13257 x := v.Args[0] 13258 v.reset(OpARM64SCVTFS) 13259 v.AddArg(x) 13260 return true 13261 } 13262 } 13263 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool { 13264 // match: (Cvt64to64F x) 13265 // cond: 13266 // result: (SCVTFD x) 13267 for { 13268 x := v.Args[0] 13269 v.reset(OpARM64SCVTFD) 13270 v.AddArg(x) 13271 return true 13272 } 13273 } 13274 func rewriteValueARM64_OpDiv16_0(v *Value) bool { 13275 b := v.Block 13276 _ = b 13277 typ := &b.Func.Config.Types 13278 _ = typ 13279 // match: (Div16 x y) 13280 // cond: 13281 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 13282 for { 13283 _ = v.Args[1] 13284 x := v.Args[0] 13285 y := v.Args[1] 13286 v.reset(OpARM64DIVW) 13287 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13288 v0.AddArg(x) 13289 v.AddArg(v0) 13290 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13291 v1.AddArg(y) 13292 v.AddArg(v1) 13293 return true 13294 } 13295 } 13296 func rewriteValueARM64_OpDiv16u_0(v *Value) bool { 13297 b := v.Block 13298 _ = b 13299 typ := &b.Func.Config.Types 13300 _ = typ 13301 // match: (Div16u x y) 13302 // cond: 13303 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 13304 for { 13305 _ = v.Args[1] 13306 x := v.Args[0] 13307 y := v.Args[1] 13308 v.reset(OpARM64UDIVW) 13309 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13310 v0.AddArg(x) 13311 v.AddArg(v0) 13312 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13313 v1.AddArg(y) 13314 v.AddArg(v1) 13315 return true 13316 } 13317 } 13318 func rewriteValueARM64_OpDiv32_0(v *Value) bool { 13319 // match: (Div32 x y) 13320 // cond: 13321 // result: (DIVW x y) 13322 for { 13323 _ = v.Args[1] 13324 x := v.Args[0] 13325 y := v.Args[1] 13326 v.reset(OpARM64DIVW) 13327 v.AddArg(x) 13328 v.AddArg(y) 13329 return true 13330 } 13331 } 13332 func rewriteValueARM64_OpDiv32F_0(v *Value) bool { 13333 // match: (Div32F x y) 13334 // cond: 13335 // result: (FDIVS x y) 13336 for { 13337 _ = v.Args[1] 13338 x := 
v.Args[0] 13339 y := v.Args[1] 13340 v.reset(OpARM64FDIVS) 13341 v.AddArg(x) 13342 v.AddArg(y) 13343 return true 13344 } 13345 } 13346 func rewriteValueARM64_OpDiv32u_0(v *Value) bool { 13347 // match: (Div32u x y) 13348 // cond: 13349 // result: (UDIVW x y) 13350 for { 13351 _ = v.Args[1] 13352 x := v.Args[0] 13353 y := v.Args[1] 13354 v.reset(OpARM64UDIVW) 13355 v.AddArg(x) 13356 v.AddArg(y) 13357 return true 13358 } 13359 } 13360 func rewriteValueARM64_OpDiv64_0(v *Value) bool { 13361 // match: (Div64 x y) 13362 // cond: 13363 // result: (DIV x y) 13364 for { 13365 _ = v.Args[1] 13366 x := v.Args[0] 13367 y := v.Args[1] 13368 v.reset(OpARM64DIV) 13369 v.AddArg(x) 13370 v.AddArg(y) 13371 return true 13372 } 13373 } 13374 func rewriteValueARM64_OpDiv64F_0(v *Value) bool { 13375 // match: (Div64F x y) 13376 // cond: 13377 // result: (FDIVD x y) 13378 for { 13379 _ = v.Args[1] 13380 x := v.Args[0] 13381 y := v.Args[1] 13382 v.reset(OpARM64FDIVD) 13383 v.AddArg(x) 13384 v.AddArg(y) 13385 return true 13386 } 13387 } 13388 func rewriteValueARM64_OpDiv64u_0(v *Value) bool { 13389 // match: (Div64u x y) 13390 // cond: 13391 // result: (UDIV x y) 13392 for { 13393 _ = v.Args[1] 13394 x := v.Args[0] 13395 y := v.Args[1] 13396 v.reset(OpARM64UDIV) 13397 v.AddArg(x) 13398 v.AddArg(y) 13399 return true 13400 } 13401 } 13402 func rewriteValueARM64_OpDiv8_0(v *Value) bool { 13403 b := v.Block 13404 _ = b 13405 typ := &b.Func.Config.Types 13406 _ = typ 13407 // match: (Div8 x y) 13408 // cond: 13409 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 13410 for { 13411 _ = v.Args[1] 13412 x := v.Args[0] 13413 y := v.Args[1] 13414 v.reset(OpARM64DIVW) 13415 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13416 v0.AddArg(x) 13417 v.AddArg(v0) 13418 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13419 v1.AddArg(y) 13420 v.AddArg(v1) 13421 return true 13422 } 13423 } 13424 func rewriteValueARM64_OpDiv8u_0(v *Value) bool { 13425 b := v.Block 13426 _ = b 13427 typ := 
&b.Func.Config.Types 13428 _ = typ 13429 // match: (Div8u x y) 13430 // cond: 13431 // result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y)) 13432 for { 13433 _ = v.Args[1] 13434 x := v.Args[0] 13435 y := v.Args[1] 13436 v.reset(OpARM64UDIVW) 13437 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13438 v0.AddArg(x) 13439 v.AddArg(v0) 13440 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13441 v1.AddArg(y) 13442 v.AddArg(v1) 13443 return true 13444 } 13445 } 13446 func rewriteValueARM64_OpEq16_0(v *Value) bool { 13447 b := v.Block 13448 _ = b 13449 typ := &b.Func.Config.Types 13450 _ = typ 13451 // match: (Eq16 x y) 13452 // cond: 13453 // result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 13454 for { 13455 _ = v.Args[1] 13456 x := v.Args[0] 13457 y := v.Args[1] 13458 v.reset(OpARM64Equal) 13459 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13460 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13461 v1.AddArg(x) 13462 v0.AddArg(v1) 13463 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13464 v2.AddArg(y) 13465 v0.AddArg(v2) 13466 v.AddArg(v0) 13467 return true 13468 } 13469 } 13470 func rewriteValueARM64_OpEq32_0(v *Value) bool { 13471 b := v.Block 13472 _ = b 13473 // match: (Eq32 x y) 13474 // cond: 13475 // result: (Equal (CMPW x y)) 13476 for { 13477 _ = v.Args[1] 13478 x := v.Args[0] 13479 y := v.Args[1] 13480 v.reset(OpARM64Equal) 13481 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13482 v0.AddArg(x) 13483 v0.AddArg(y) 13484 v.AddArg(v0) 13485 return true 13486 } 13487 } 13488 func rewriteValueARM64_OpEq32F_0(v *Value) bool { 13489 b := v.Block 13490 _ = b 13491 // match: (Eq32F x y) 13492 // cond: 13493 // result: (Equal (FCMPS x y)) 13494 for { 13495 _ = v.Args[1] 13496 x := v.Args[0] 13497 y := v.Args[1] 13498 v.reset(OpARM64Equal) 13499 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 13500 v0.AddArg(x) 13501 v0.AddArg(y) 13502 v.AddArg(v0) 13503 return true 13504 } 13505 } 13506 func 
rewriteValueARM64_OpEq64_0(v *Value) bool { 13507 b := v.Block 13508 _ = b 13509 // match: (Eq64 x y) 13510 // cond: 13511 // result: (Equal (CMP x y)) 13512 for { 13513 _ = v.Args[1] 13514 x := v.Args[0] 13515 y := v.Args[1] 13516 v.reset(OpARM64Equal) 13517 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13518 v0.AddArg(x) 13519 v0.AddArg(y) 13520 v.AddArg(v0) 13521 return true 13522 } 13523 } 13524 func rewriteValueARM64_OpEq64F_0(v *Value) bool { 13525 b := v.Block 13526 _ = b 13527 // match: (Eq64F x y) 13528 // cond: 13529 // result: (Equal (FCMPD x y)) 13530 for { 13531 _ = v.Args[1] 13532 x := v.Args[0] 13533 y := v.Args[1] 13534 v.reset(OpARM64Equal) 13535 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 13536 v0.AddArg(x) 13537 v0.AddArg(y) 13538 v.AddArg(v0) 13539 return true 13540 } 13541 } 13542 func rewriteValueARM64_OpEq8_0(v *Value) bool { 13543 b := v.Block 13544 _ = b 13545 typ := &b.Func.Config.Types 13546 _ = typ 13547 // match: (Eq8 x y) 13548 // cond: 13549 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 13550 for { 13551 _ = v.Args[1] 13552 x := v.Args[0] 13553 y := v.Args[1] 13554 v.reset(OpARM64Equal) 13555 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13556 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13557 v1.AddArg(x) 13558 v0.AddArg(v1) 13559 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13560 v2.AddArg(y) 13561 v0.AddArg(v2) 13562 v.AddArg(v0) 13563 return true 13564 } 13565 } 13566 func rewriteValueARM64_OpEqB_0(v *Value) bool { 13567 b := v.Block 13568 _ = b 13569 typ := &b.Func.Config.Types 13570 _ = typ 13571 // match: (EqB x y) 13572 // cond: 13573 // result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y)) 13574 for { 13575 _ = v.Args[1] 13576 x := v.Args[0] 13577 y := v.Args[1] 13578 v.reset(OpARM64XOR) 13579 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 13580 v0.AuxInt = 1 13581 v.AddArg(v0) 13582 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool) 13583 v1.AddArg(x) 13584 
v1.AddArg(y) 13585 v.AddArg(v1) 13586 return true 13587 } 13588 } 13589 func rewriteValueARM64_OpEqPtr_0(v *Value) bool { 13590 b := v.Block 13591 _ = b 13592 // match: (EqPtr x y) 13593 // cond: 13594 // result: (Equal (CMP x y)) 13595 for { 13596 _ = v.Args[1] 13597 x := v.Args[0] 13598 y := v.Args[1] 13599 v.reset(OpARM64Equal) 13600 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13601 v0.AddArg(x) 13602 v0.AddArg(y) 13603 v.AddArg(v0) 13604 return true 13605 } 13606 } 13607 func rewriteValueARM64_OpFloor_0(v *Value) bool { 13608 // match: (Floor x) 13609 // cond: 13610 // result: (FRINTMD x) 13611 for { 13612 x := v.Args[0] 13613 v.reset(OpARM64FRINTMD) 13614 v.AddArg(x) 13615 return true 13616 } 13617 } 13618 func rewriteValueARM64_OpGeq16_0(v *Value) bool { 13619 b := v.Block 13620 _ = b 13621 typ := &b.Func.Config.Types 13622 _ = typ 13623 // match: (Geq16 x y) 13624 // cond: 13625 // result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 13626 for { 13627 _ = v.Args[1] 13628 x := v.Args[0] 13629 y := v.Args[1] 13630 v.reset(OpARM64GreaterEqual) 13631 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13632 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13633 v1.AddArg(x) 13634 v0.AddArg(v1) 13635 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13636 v2.AddArg(y) 13637 v0.AddArg(v2) 13638 v.AddArg(v0) 13639 return true 13640 } 13641 } 13642 func rewriteValueARM64_OpGeq16U_0(v *Value) bool { 13643 b := v.Block 13644 _ = b 13645 typ := &b.Func.Config.Types 13646 _ = typ 13647 // match: (Geq16U x y) 13648 // cond: 13649 // result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 13650 for { 13651 _ = v.Args[1] 13652 x := v.Args[0] 13653 y := v.Args[1] 13654 v.reset(OpARM64GreaterEqualU) 13655 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13656 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13657 v1.AddArg(x) 13658 v0.AddArg(v1) 13659 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13660 
v2.AddArg(y) 13661 v0.AddArg(v2) 13662 v.AddArg(v0) 13663 return true 13664 } 13665 } 13666 func rewriteValueARM64_OpGeq32_0(v *Value) bool { 13667 b := v.Block 13668 _ = b 13669 // match: (Geq32 x y) 13670 // cond: 13671 // result: (GreaterEqual (CMPW x y)) 13672 for { 13673 _ = v.Args[1] 13674 x := v.Args[0] 13675 y := v.Args[1] 13676 v.reset(OpARM64GreaterEqual) 13677 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13678 v0.AddArg(x) 13679 v0.AddArg(y) 13680 v.AddArg(v0) 13681 return true 13682 } 13683 } 13684 func rewriteValueARM64_OpGeq32F_0(v *Value) bool { 13685 b := v.Block 13686 _ = b 13687 // match: (Geq32F x y) 13688 // cond: 13689 // result: (GreaterEqual (FCMPS x y)) 13690 for { 13691 _ = v.Args[1] 13692 x := v.Args[0] 13693 y := v.Args[1] 13694 v.reset(OpARM64GreaterEqual) 13695 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 13696 v0.AddArg(x) 13697 v0.AddArg(y) 13698 v.AddArg(v0) 13699 return true 13700 } 13701 } 13702 func rewriteValueARM64_OpGeq32U_0(v *Value) bool { 13703 b := v.Block 13704 _ = b 13705 // match: (Geq32U x y) 13706 // cond: 13707 // result: (GreaterEqualU (CMPW x y)) 13708 for { 13709 _ = v.Args[1] 13710 x := v.Args[0] 13711 y := v.Args[1] 13712 v.reset(OpARM64GreaterEqualU) 13713 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13714 v0.AddArg(x) 13715 v0.AddArg(y) 13716 v.AddArg(v0) 13717 return true 13718 } 13719 } 13720 func rewriteValueARM64_OpGeq64_0(v *Value) bool { 13721 b := v.Block 13722 _ = b 13723 // match: (Geq64 x y) 13724 // cond: 13725 // result: (GreaterEqual (CMP x y)) 13726 for { 13727 _ = v.Args[1] 13728 x := v.Args[0] 13729 y := v.Args[1] 13730 v.reset(OpARM64GreaterEqual) 13731 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13732 v0.AddArg(x) 13733 v0.AddArg(y) 13734 v.AddArg(v0) 13735 return true 13736 } 13737 } 13738 func rewriteValueARM64_OpGeq64F_0(v *Value) bool { 13739 b := v.Block 13740 _ = b 13741 // match: (Geq64F x y) 13742 // cond: 13743 // result: (GreaterEqual (FCMPD x 
y)) 13744 for { 13745 _ = v.Args[1] 13746 x := v.Args[0] 13747 y := v.Args[1] 13748 v.reset(OpARM64GreaterEqual) 13749 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 13750 v0.AddArg(x) 13751 v0.AddArg(y) 13752 v.AddArg(v0) 13753 return true 13754 } 13755 } 13756 func rewriteValueARM64_OpGeq64U_0(v *Value) bool { 13757 b := v.Block 13758 _ = b 13759 // match: (Geq64U x y) 13760 // cond: 13761 // result: (GreaterEqualU (CMP x y)) 13762 for { 13763 _ = v.Args[1] 13764 x := v.Args[0] 13765 y := v.Args[1] 13766 v.reset(OpARM64GreaterEqualU) 13767 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13768 v0.AddArg(x) 13769 v0.AddArg(y) 13770 v.AddArg(v0) 13771 return true 13772 } 13773 } 13774 func rewriteValueARM64_OpGeq8_0(v *Value) bool { 13775 b := v.Block 13776 _ = b 13777 typ := &b.Func.Config.Types 13778 _ = typ 13779 // match: (Geq8 x y) 13780 // cond: 13781 // result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 13782 for { 13783 _ = v.Args[1] 13784 x := v.Args[0] 13785 y := v.Args[1] 13786 v.reset(OpARM64GreaterEqual) 13787 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13788 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13789 v1.AddArg(x) 13790 v0.AddArg(v1) 13791 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13792 v2.AddArg(y) 13793 v0.AddArg(v2) 13794 v.AddArg(v0) 13795 return true 13796 } 13797 } 13798 func rewriteValueARM64_OpGeq8U_0(v *Value) bool { 13799 b := v.Block 13800 _ = b 13801 typ := &b.Func.Config.Types 13802 _ = typ 13803 // match: (Geq8U x y) 13804 // cond: 13805 // result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 13806 for { 13807 _ = v.Args[1] 13808 x := v.Args[0] 13809 y := v.Args[1] 13810 v.reset(OpARM64GreaterEqualU) 13811 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13812 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13813 v1.AddArg(x) 13814 v0.AddArg(v1) 13815 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13816 v2.AddArg(y) 13817 v0.AddArg(v2) 13818 
v.AddArg(v0) 13819 return true 13820 } 13821 } 13822 func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool { 13823 // match: (GetCallerSP) 13824 // cond: 13825 // result: (LoweredGetCallerSP) 13826 for { 13827 v.reset(OpARM64LoweredGetCallerSP) 13828 return true 13829 } 13830 } 13831 func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool { 13832 // match: (GetClosurePtr) 13833 // cond: 13834 // result: (LoweredGetClosurePtr) 13835 for { 13836 v.reset(OpARM64LoweredGetClosurePtr) 13837 return true 13838 } 13839 } 13840 func rewriteValueARM64_OpGreater16_0(v *Value) bool { 13841 b := v.Block 13842 _ = b 13843 typ := &b.Func.Config.Types 13844 _ = typ 13845 // match: (Greater16 x y) 13846 // cond: 13847 // result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 13848 for { 13849 _ = v.Args[1] 13850 x := v.Args[0] 13851 y := v.Args[1] 13852 v.reset(OpARM64GreaterThan) 13853 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13854 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13855 v1.AddArg(x) 13856 v0.AddArg(v1) 13857 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13858 v2.AddArg(y) 13859 v0.AddArg(v2) 13860 v.AddArg(v0) 13861 return true 13862 } 13863 } 13864 func rewriteValueARM64_OpGreater16U_0(v *Value) bool { 13865 b := v.Block 13866 _ = b 13867 typ := &b.Func.Config.Types 13868 _ = typ 13869 // match: (Greater16U x y) 13870 // cond: 13871 // result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 13872 for { 13873 _ = v.Args[1] 13874 x := v.Args[0] 13875 y := v.Args[1] 13876 v.reset(OpARM64GreaterThanU) 13877 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13878 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13879 v1.AddArg(x) 13880 v0.AddArg(v1) 13881 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13882 v2.AddArg(y) 13883 v0.AddArg(v2) 13884 v.AddArg(v0) 13885 return true 13886 } 13887 } 13888 func rewriteValueARM64_OpGreater32_0(v *Value) bool { 13889 b := v.Block 13890 _ = b 13891 // match: (Greater32 
x y) 13892 // cond: 13893 // result: (GreaterThan (CMPW x y)) 13894 for { 13895 _ = v.Args[1] 13896 x := v.Args[0] 13897 y := v.Args[1] 13898 v.reset(OpARM64GreaterThan) 13899 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13900 v0.AddArg(x) 13901 v0.AddArg(y) 13902 v.AddArg(v0) 13903 return true 13904 } 13905 } 13906 func rewriteValueARM64_OpGreater32F_0(v *Value) bool { 13907 b := v.Block 13908 _ = b 13909 // match: (Greater32F x y) 13910 // cond: 13911 // result: (GreaterThan (FCMPS x y)) 13912 for { 13913 _ = v.Args[1] 13914 x := v.Args[0] 13915 y := v.Args[1] 13916 v.reset(OpARM64GreaterThan) 13917 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 13918 v0.AddArg(x) 13919 v0.AddArg(y) 13920 v.AddArg(v0) 13921 return true 13922 } 13923 } 13924 func rewriteValueARM64_OpGreater32U_0(v *Value) bool { 13925 b := v.Block 13926 _ = b 13927 // match: (Greater32U x y) 13928 // cond: 13929 // result: (GreaterThanU (CMPW x y)) 13930 for { 13931 _ = v.Args[1] 13932 x := v.Args[0] 13933 y := v.Args[1] 13934 v.reset(OpARM64GreaterThanU) 13935 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13936 v0.AddArg(x) 13937 v0.AddArg(y) 13938 v.AddArg(v0) 13939 return true 13940 } 13941 } 13942 func rewriteValueARM64_OpGreater64_0(v *Value) bool { 13943 b := v.Block 13944 _ = b 13945 // match: (Greater64 x y) 13946 // cond: 13947 // result: (GreaterThan (CMP x y)) 13948 for { 13949 _ = v.Args[1] 13950 x := v.Args[0] 13951 y := v.Args[1] 13952 v.reset(OpARM64GreaterThan) 13953 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13954 v0.AddArg(x) 13955 v0.AddArg(y) 13956 v.AddArg(v0) 13957 return true 13958 } 13959 } 13960 func rewriteValueARM64_OpGreater64F_0(v *Value) bool { 13961 b := v.Block 13962 _ = b 13963 // match: (Greater64F x y) 13964 // cond: 13965 // result: (GreaterThan (FCMPD x y)) 13966 for { 13967 _ = v.Args[1] 13968 x := v.Args[0] 13969 y := v.Args[1] 13970 v.reset(OpARM64GreaterThan) 13971 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, 
types.TypeFlags) 13972 v0.AddArg(x) 13973 v0.AddArg(y) 13974 v.AddArg(v0) 13975 return true 13976 } 13977 } 13978 func rewriteValueARM64_OpGreater64U_0(v *Value) bool { 13979 b := v.Block 13980 _ = b 13981 // match: (Greater64U x y) 13982 // cond: 13983 // result: (GreaterThanU (CMP x y)) 13984 for { 13985 _ = v.Args[1] 13986 x := v.Args[0] 13987 y := v.Args[1] 13988 v.reset(OpARM64GreaterThanU) 13989 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 13990 v0.AddArg(x) 13991 v0.AddArg(y) 13992 v.AddArg(v0) 13993 return true 13994 } 13995 } 13996 func rewriteValueARM64_OpGreater8_0(v *Value) bool { 13997 b := v.Block 13998 _ = b 13999 typ := &b.Func.Config.Types 14000 _ = typ 14001 // match: (Greater8 x y) 14002 // cond: 14003 // result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 14004 for { 14005 _ = v.Args[1] 14006 x := v.Args[0] 14007 y := v.Args[1] 14008 v.reset(OpARM64GreaterThan) 14009 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14010 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14011 v1.AddArg(x) 14012 v0.AddArg(v1) 14013 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14014 v2.AddArg(y) 14015 v0.AddArg(v2) 14016 v.AddArg(v0) 14017 return true 14018 } 14019 } 14020 func rewriteValueARM64_OpGreater8U_0(v *Value) bool { 14021 b := v.Block 14022 _ = b 14023 typ := &b.Func.Config.Types 14024 _ = typ 14025 // match: (Greater8U x y) 14026 // cond: 14027 // result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 14028 for { 14029 _ = v.Args[1] 14030 x := v.Args[0] 14031 y := v.Args[1] 14032 v.reset(OpARM64GreaterThanU) 14033 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14034 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14035 v1.AddArg(x) 14036 v0.AddArg(v1) 14037 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14038 v2.AddArg(y) 14039 v0.AddArg(v2) 14040 v.AddArg(v0) 14041 return true 14042 } 14043 } 14044 func rewriteValueARM64_OpHmul32_0(v *Value) bool { 14045 b := v.Block 14046 _ = b 14047 typ 
:= &b.Func.Config.Types 14048 _ = typ 14049 // match: (Hmul32 x y) 14050 // cond: 14051 // result: (SRAconst (MULL <typ.Int64> x y) [32]) 14052 for { 14053 _ = v.Args[1] 14054 x := v.Args[0] 14055 y := v.Args[1] 14056 v.reset(OpARM64SRAconst) 14057 v.AuxInt = 32 14058 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64) 14059 v0.AddArg(x) 14060 v0.AddArg(y) 14061 v.AddArg(v0) 14062 return true 14063 } 14064 } 14065 func rewriteValueARM64_OpHmul32u_0(v *Value) bool { 14066 b := v.Block 14067 _ = b 14068 typ := &b.Func.Config.Types 14069 _ = typ 14070 // match: (Hmul32u x y) 14071 // cond: 14072 // result: (SRAconst (UMULL <typ.UInt64> x y) [32]) 14073 for { 14074 _ = v.Args[1] 14075 x := v.Args[0] 14076 y := v.Args[1] 14077 v.reset(OpARM64SRAconst) 14078 v.AuxInt = 32 14079 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64) 14080 v0.AddArg(x) 14081 v0.AddArg(y) 14082 v.AddArg(v0) 14083 return true 14084 } 14085 } 14086 func rewriteValueARM64_OpHmul64_0(v *Value) bool { 14087 // match: (Hmul64 x y) 14088 // cond: 14089 // result: (MULH x y) 14090 for { 14091 _ = v.Args[1] 14092 x := v.Args[0] 14093 y := v.Args[1] 14094 v.reset(OpARM64MULH) 14095 v.AddArg(x) 14096 v.AddArg(y) 14097 return true 14098 } 14099 } 14100 func rewriteValueARM64_OpHmul64u_0(v *Value) bool { 14101 // match: (Hmul64u x y) 14102 // cond: 14103 // result: (UMULH x y) 14104 for { 14105 _ = v.Args[1] 14106 x := v.Args[0] 14107 y := v.Args[1] 14108 v.reset(OpARM64UMULH) 14109 v.AddArg(x) 14110 v.AddArg(y) 14111 return true 14112 } 14113 } 14114 func rewriteValueARM64_OpInterCall_0(v *Value) bool { 14115 // match: (InterCall [argwid] entry mem) 14116 // cond: 14117 // result: (CALLinter [argwid] entry mem) 14118 for { 14119 argwid := v.AuxInt 14120 _ = v.Args[1] 14121 entry := v.Args[0] 14122 mem := v.Args[1] 14123 v.reset(OpARM64CALLinter) 14124 v.AuxInt = argwid 14125 v.AddArg(entry) 14126 v.AddArg(mem) 14127 return true 14128 } 14129 } 14130 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool { 14131 
b := v.Block 14132 _ = b 14133 // match: (IsInBounds idx len) 14134 // cond: 14135 // result: (LessThanU (CMP idx len)) 14136 for { 14137 _ = v.Args[1] 14138 idx := v.Args[0] 14139 len := v.Args[1] 14140 v.reset(OpARM64LessThanU) 14141 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14142 v0.AddArg(idx) 14143 v0.AddArg(len) 14144 v.AddArg(v0) 14145 return true 14146 } 14147 } 14148 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool { 14149 b := v.Block 14150 _ = b 14151 // match: (IsNonNil ptr) 14152 // cond: 14153 // result: (NotEqual (CMPconst [0] ptr)) 14154 for { 14155 ptr := v.Args[0] 14156 v.reset(OpARM64NotEqual) 14157 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14158 v0.AuxInt = 0 14159 v0.AddArg(ptr) 14160 v.AddArg(v0) 14161 return true 14162 } 14163 } 14164 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool { 14165 b := v.Block 14166 _ = b 14167 // match: (IsSliceInBounds idx len) 14168 // cond: 14169 // result: (LessEqualU (CMP idx len)) 14170 for { 14171 _ = v.Args[1] 14172 idx := v.Args[0] 14173 len := v.Args[1] 14174 v.reset(OpARM64LessEqualU) 14175 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14176 v0.AddArg(idx) 14177 v0.AddArg(len) 14178 v.AddArg(v0) 14179 return true 14180 } 14181 } 14182 func rewriteValueARM64_OpLeq16_0(v *Value) bool { 14183 b := v.Block 14184 _ = b 14185 typ := &b.Func.Config.Types 14186 _ = typ 14187 // match: (Leq16 x y) 14188 // cond: 14189 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 14190 for { 14191 _ = v.Args[1] 14192 x := v.Args[0] 14193 y := v.Args[1] 14194 v.reset(OpARM64LessEqual) 14195 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14196 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 14197 v1.AddArg(x) 14198 v0.AddArg(v1) 14199 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 14200 v2.AddArg(y) 14201 v0.AddArg(v2) 14202 v.AddArg(v0) 14203 return true 14204 } 14205 } 14206 func rewriteValueARM64_OpLeq16U_0(v *Value) bool { 14207 b := v.Block 
14208 _ = b 14209 typ := &b.Func.Config.Types 14210 _ = typ 14211 // match: (Leq16U x y) 14212 // cond: 14213 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 14214 for { 14215 _ = v.Args[1] 14216 x := v.Args[0] 14217 y := v.Args[1] 14218 v.reset(OpARM64LessEqualU) 14219 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14220 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 14221 v1.AddArg(x) 14222 v0.AddArg(v1) 14223 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 14224 v2.AddArg(y) 14225 v0.AddArg(v2) 14226 v.AddArg(v0) 14227 return true 14228 } 14229 } 14230 func rewriteValueARM64_OpLeq32_0(v *Value) bool { 14231 b := v.Block 14232 _ = b 14233 // match: (Leq32 x y) 14234 // cond: 14235 // result: (LessEqual (CMPW x y)) 14236 for { 14237 _ = v.Args[1] 14238 x := v.Args[0] 14239 y := v.Args[1] 14240 v.reset(OpARM64LessEqual) 14241 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14242 v0.AddArg(x) 14243 v0.AddArg(y) 14244 v.AddArg(v0) 14245 return true 14246 } 14247 } 14248 func rewriteValueARM64_OpLeq32F_0(v *Value) bool { 14249 b := v.Block 14250 _ = b 14251 // match: (Leq32F x y) 14252 // cond: 14253 // result: (GreaterEqual (FCMPS y x)) 14254 for { 14255 _ = v.Args[1] 14256 x := v.Args[0] 14257 y := v.Args[1] 14258 v.reset(OpARM64GreaterEqual) 14259 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 14260 v0.AddArg(y) 14261 v0.AddArg(x) 14262 v.AddArg(v0) 14263 return true 14264 } 14265 } 14266 func rewriteValueARM64_OpLeq32U_0(v *Value) bool { 14267 b := v.Block 14268 _ = b 14269 // match: (Leq32U x y) 14270 // cond: 14271 // result: (LessEqualU (CMPW x y)) 14272 for { 14273 _ = v.Args[1] 14274 x := v.Args[0] 14275 y := v.Args[1] 14276 v.reset(OpARM64LessEqualU) 14277 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14278 v0.AddArg(x) 14279 v0.AddArg(y) 14280 v.AddArg(v0) 14281 return true 14282 } 14283 } 14284 func rewriteValueARM64_OpLeq64_0(v *Value) bool { 14285 b := v.Block 14286 _ = b 14287 // match: 
(Leq64 x y) 14288 // cond: 14289 // result: (LessEqual (CMP x y)) 14290 for { 14291 _ = v.Args[1] 14292 x := v.Args[0] 14293 y := v.Args[1] 14294 v.reset(OpARM64LessEqual) 14295 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14296 v0.AddArg(x) 14297 v0.AddArg(y) 14298 v.AddArg(v0) 14299 return true 14300 } 14301 } 14302 func rewriteValueARM64_OpLeq64F_0(v *Value) bool { 14303 b := v.Block 14304 _ = b 14305 // match: (Leq64F x y) 14306 // cond: 14307 // result: (GreaterEqual (FCMPD y x)) 14308 for { 14309 _ = v.Args[1] 14310 x := v.Args[0] 14311 y := v.Args[1] 14312 v.reset(OpARM64GreaterEqual) 14313 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 14314 v0.AddArg(y) 14315 v0.AddArg(x) 14316 v.AddArg(v0) 14317 return true 14318 } 14319 } 14320 func rewriteValueARM64_OpLeq64U_0(v *Value) bool { 14321 b := v.Block 14322 _ = b 14323 // match: (Leq64U x y) 14324 // cond: 14325 // result: (LessEqualU (CMP x y)) 14326 for { 14327 _ = v.Args[1] 14328 x := v.Args[0] 14329 y := v.Args[1] 14330 v.reset(OpARM64LessEqualU) 14331 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14332 v0.AddArg(x) 14333 v0.AddArg(y) 14334 v.AddArg(v0) 14335 return true 14336 } 14337 } 14338 func rewriteValueARM64_OpLeq8_0(v *Value) bool { 14339 b := v.Block 14340 _ = b 14341 typ := &b.Func.Config.Types 14342 _ = typ 14343 // match: (Leq8 x y) 14344 // cond: 14345 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 14346 for { 14347 _ = v.Args[1] 14348 x := v.Args[0] 14349 y := v.Args[1] 14350 v.reset(OpARM64LessEqual) 14351 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14352 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14353 v1.AddArg(x) 14354 v0.AddArg(v1) 14355 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14356 v2.AddArg(y) 14357 v0.AddArg(v2) 14358 v.AddArg(v0) 14359 return true 14360 } 14361 } 14362 func rewriteValueARM64_OpLeq8U_0(v *Value) bool { 14363 b := v.Block 14364 _ = b 14365 typ := &b.Func.Config.Types 14366 _ = typ 14367 // 
match: (Leq8U x y) 14368 // cond: 14369 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 14370 for { 14371 _ = v.Args[1] 14372 x := v.Args[0] 14373 y := v.Args[1] 14374 v.reset(OpARM64LessEqualU) 14375 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14376 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14377 v1.AddArg(x) 14378 v0.AddArg(v1) 14379 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14380 v2.AddArg(y) 14381 v0.AddArg(v2) 14382 v.AddArg(v0) 14383 return true 14384 } 14385 } 14386 func rewriteValueARM64_OpLess16_0(v *Value) bool { 14387 b := v.Block 14388 _ = b 14389 typ := &b.Func.Config.Types 14390 _ = typ 14391 // match: (Less16 x y) 14392 // cond: 14393 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 14394 for { 14395 _ = v.Args[1] 14396 x := v.Args[0] 14397 y := v.Args[1] 14398 v.reset(OpARM64LessThan) 14399 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14400 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 14401 v1.AddArg(x) 14402 v0.AddArg(v1) 14403 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 14404 v2.AddArg(y) 14405 v0.AddArg(v2) 14406 v.AddArg(v0) 14407 return true 14408 } 14409 } 14410 func rewriteValueARM64_OpLess16U_0(v *Value) bool { 14411 b := v.Block 14412 _ = b 14413 typ := &b.Func.Config.Types 14414 _ = typ 14415 // match: (Less16U x y) 14416 // cond: 14417 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 14418 for { 14419 _ = v.Args[1] 14420 x := v.Args[0] 14421 y := v.Args[1] 14422 v.reset(OpARM64LessThanU) 14423 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14424 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 14425 v1.AddArg(x) 14426 v0.AddArg(v1) 14427 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 14428 v2.AddArg(y) 14429 v0.AddArg(v2) 14430 v.AddArg(v0) 14431 return true 14432 } 14433 } 14434 func rewriteValueARM64_OpLess32_0(v *Value) bool { 14435 b := v.Block 14436 _ = b 14437 // match: (Less32 x y) 14438 // cond: 14439 
// result: (LessThan (CMPW x y)) 14440 for { 14441 _ = v.Args[1] 14442 x := v.Args[0] 14443 y := v.Args[1] 14444 v.reset(OpARM64LessThan) 14445 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14446 v0.AddArg(x) 14447 v0.AddArg(y) 14448 v.AddArg(v0) 14449 return true 14450 } 14451 } 14452 func rewriteValueARM64_OpLess32F_0(v *Value) bool { 14453 b := v.Block 14454 _ = b 14455 // match: (Less32F x y) 14456 // cond: 14457 // result: (GreaterThan (FCMPS y x)) 14458 for { 14459 _ = v.Args[1] 14460 x := v.Args[0] 14461 y := v.Args[1] 14462 v.reset(OpARM64GreaterThan) 14463 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 14464 v0.AddArg(y) 14465 v0.AddArg(x) 14466 v.AddArg(v0) 14467 return true 14468 } 14469 } 14470 func rewriteValueARM64_OpLess32U_0(v *Value) bool { 14471 b := v.Block 14472 _ = b 14473 // match: (Less32U x y) 14474 // cond: 14475 // result: (LessThanU (CMPW x y)) 14476 for { 14477 _ = v.Args[1] 14478 x := v.Args[0] 14479 y := v.Args[1] 14480 v.reset(OpARM64LessThanU) 14481 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14482 v0.AddArg(x) 14483 v0.AddArg(y) 14484 v.AddArg(v0) 14485 return true 14486 } 14487 } 14488 func rewriteValueARM64_OpLess64_0(v *Value) bool { 14489 b := v.Block 14490 _ = b 14491 // match: (Less64 x y) 14492 // cond: 14493 // result: (LessThan (CMP x y)) 14494 for { 14495 _ = v.Args[1] 14496 x := v.Args[0] 14497 y := v.Args[1] 14498 v.reset(OpARM64LessThan) 14499 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14500 v0.AddArg(x) 14501 v0.AddArg(y) 14502 v.AddArg(v0) 14503 return true 14504 } 14505 } 14506 func rewriteValueARM64_OpLess64F_0(v *Value) bool { 14507 b := v.Block 14508 _ = b 14509 // match: (Less64F x y) 14510 // cond: 14511 // result: (GreaterThan (FCMPD y x)) 14512 for { 14513 _ = v.Args[1] 14514 x := v.Args[0] 14515 y := v.Args[1] 14516 v.reset(OpARM64GreaterThan) 14517 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 14518 v0.AddArg(y) 14519 v0.AddArg(x) 14520 v.AddArg(v0) 14521 
return true 14522 } 14523 } 14524 func rewriteValueARM64_OpLess64U_0(v *Value) bool { 14525 b := v.Block 14526 _ = b 14527 // match: (Less64U x y) 14528 // cond: 14529 // result: (LessThanU (CMP x y)) 14530 for { 14531 _ = v.Args[1] 14532 x := v.Args[0] 14533 y := v.Args[1] 14534 v.reset(OpARM64LessThanU) 14535 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14536 v0.AddArg(x) 14537 v0.AddArg(y) 14538 v.AddArg(v0) 14539 return true 14540 } 14541 } 14542 func rewriteValueARM64_OpLess8_0(v *Value) bool { 14543 b := v.Block 14544 _ = b 14545 typ := &b.Func.Config.Types 14546 _ = typ 14547 // match: (Less8 x y) 14548 // cond: 14549 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 14550 for { 14551 _ = v.Args[1] 14552 x := v.Args[0] 14553 y := v.Args[1] 14554 v.reset(OpARM64LessThan) 14555 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14556 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14557 v1.AddArg(x) 14558 v0.AddArg(v1) 14559 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 14560 v2.AddArg(y) 14561 v0.AddArg(v2) 14562 v.AddArg(v0) 14563 return true 14564 } 14565 } 14566 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 14567 b := v.Block 14568 _ = b 14569 typ := &b.Func.Config.Types 14570 _ = typ 14571 // match: (Less8U x y) 14572 // cond: 14573 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 14574 for { 14575 _ = v.Args[1] 14576 x := v.Args[0] 14577 y := v.Args[1] 14578 v.reset(OpARM64LessThanU) 14579 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14580 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14581 v1.AddArg(x) 14582 v0.AddArg(v1) 14583 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14584 v2.AddArg(y) 14585 v0.AddArg(v2) 14586 v.AddArg(v0) 14587 return true 14588 } 14589 } 14590 func rewriteValueARM64_OpLoad_0(v *Value) bool { 14591 // match: (Load <t> ptr mem) 14592 // cond: t.IsBoolean() 14593 // result: (MOVBUload ptr mem) 14594 for { 14595 t := v.Type 14596 _ = v.Args[1] 14597 ptr 
:= v.Args[0] 14598 mem := v.Args[1] 14599 if !(t.IsBoolean()) { 14600 break 14601 } 14602 v.reset(OpARM64MOVBUload) 14603 v.AddArg(ptr) 14604 v.AddArg(mem) 14605 return true 14606 } 14607 // match: (Load <t> ptr mem) 14608 // cond: (is8BitInt(t) && isSigned(t)) 14609 // result: (MOVBload ptr mem) 14610 for { 14611 t := v.Type 14612 _ = v.Args[1] 14613 ptr := v.Args[0] 14614 mem := v.Args[1] 14615 if !(is8BitInt(t) && isSigned(t)) { 14616 break 14617 } 14618 v.reset(OpARM64MOVBload) 14619 v.AddArg(ptr) 14620 v.AddArg(mem) 14621 return true 14622 } 14623 // match: (Load <t> ptr mem) 14624 // cond: (is8BitInt(t) && !isSigned(t)) 14625 // result: (MOVBUload ptr mem) 14626 for { 14627 t := v.Type 14628 _ = v.Args[1] 14629 ptr := v.Args[0] 14630 mem := v.Args[1] 14631 if !(is8BitInt(t) && !isSigned(t)) { 14632 break 14633 } 14634 v.reset(OpARM64MOVBUload) 14635 v.AddArg(ptr) 14636 v.AddArg(mem) 14637 return true 14638 } 14639 // match: (Load <t> ptr mem) 14640 // cond: (is16BitInt(t) && isSigned(t)) 14641 // result: (MOVHload ptr mem) 14642 for { 14643 t := v.Type 14644 _ = v.Args[1] 14645 ptr := v.Args[0] 14646 mem := v.Args[1] 14647 if !(is16BitInt(t) && isSigned(t)) { 14648 break 14649 } 14650 v.reset(OpARM64MOVHload) 14651 v.AddArg(ptr) 14652 v.AddArg(mem) 14653 return true 14654 } 14655 // match: (Load <t> ptr mem) 14656 // cond: (is16BitInt(t) && !isSigned(t)) 14657 // result: (MOVHUload ptr mem) 14658 for { 14659 t := v.Type 14660 _ = v.Args[1] 14661 ptr := v.Args[0] 14662 mem := v.Args[1] 14663 if !(is16BitInt(t) && !isSigned(t)) { 14664 break 14665 } 14666 v.reset(OpARM64MOVHUload) 14667 v.AddArg(ptr) 14668 v.AddArg(mem) 14669 return true 14670 } 14671 // match: (Load <t> ptr mem) 14672 // cond: (is32BitInt(t) && isSigned(t)) 14673 // result: (MOVWload ptr mem) 14674 for { 14675 t := v.Type 14676 _ = v.Args[1] 14677 ptr := v.Args[0] 14678 mem := v.Args[1] 14679 if !(is32BitInt(t) && isSigned(t)) { 14680 break 14681 } 14682 v.reset(OpARM64MOVWload) 14683 
v.AddArg(ptr) 14684 v.AddArg(mem) 14685 return true 14686 } 14687 // match: (Load <t> ptr mem) 14688 // cond: (is32BitInt(t) && !isSigned(t)) 14689 // result: (MOVWUload ptr mem) 14690 for { 14691 t := v.Type 14692 _ = v.Args[1] 14693 ptr := v.Args[0] 14694 mem := v.Args[1] 14695 if !(is32BitInt(t) && !isSigned(t)) { 14696 break 14697 } 14698 v.reset(OpARM64MOVWUload) 14699 v.AddArg(ptr) 14700 v.AddArg(mem) 14701 return true 14702 } 14703 // match: (Load <t> ptr mem) 14704 // cond: (is64BitInt(t) || isPtr(t)) 14705 // result: (MOVDload ptr mem) 14706 for { 14707 t := v.Type 14708 _ = v.Args[1] 14709 ptr := v.Args[0] 14710 mem := v.Args[1] 14711 if !(is64BitInt(t) || isPtr(t)) { 14712 break 14713 } 14714 v.reset(OpARM64MOVDload) 14715 v.AddArg(ptr) 14716 v.AddArg(mem) 14717 return true 14718 } 14719 // match: (Load <t> ptr mem) 14720 // cond: is32BitFloat(t) 14721 // result: (FMOVSload ptr mem) 14722 for { 14723 t := v.Type 14724 _ = v.Args[1] 14725 ptr := v.Args[0] 14726 mem := v.Args[1] 14727 if !(is32BitFloat(t)) { 14728 break 14729 } 14730 v.reset(OpARM64FMOVSload) 14731 v.AddArg(ptr) 14732 v.AddArg(mem) 14733 return true 14734 } 14735 // match: (Load <t> ptr mem) 14736 // cond: is64BitFloat(t) 14737 // result: (FMOVDload ptr mem) 14738 for { 14739 t := v.Type 14740 _ = v.Args[1] 14741 ptr := v.Args[0] 14742 mem := v.Args[1] 14743 if !(is64BitFloat(t)) { 14744 break 14745 } 14746 v.reset(OpARM64FMOVDload) 14747 v.AddArg(ptr) 14748 v.AddArg(mem) 14749 return true 14750 } 14751 return false 14752 } 14753 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 14754 b := v.Block 14755 _ = b 14756 typ := &b.Func.Config.Types 14757 _ = typ 14758 // match: (Lsh16x16 <t> x y) 14759 // cond: 14760 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14761 for { 14762 t := v.Type 14763 _ = v.Args[1] 14764 x := v.Args[0] 14765 y := v.Args[1] 14766 v.reset(OpARM64CSEL) 14767 v.Aux = OpARM64LessThanU 14768 v0 := 
b.NewValue0(v.Pos, OpARM64SLL, t) 14769 v0.AddArg(x) 14770 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14771 v1.AddArg(y) 14772 v0.AddArg(v1) 14773 v.AddArg(v0) 14774 v2 := b.NewValue0(v.Pos, OpConst64, t) 14775 v2.AuxInt = 0 14776 v.AddArg(v2) 14777 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14778 v3.AuxInt = 64 14779 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14780 v4.AddArg(y) 14781 v3.AddArg(v4) 14782 v.AddArg(v3) 14783 return true 14784 } 14785 } 14786 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 14787 b := v.Block 14788 _ = b 14789 typ := &b.Func.Config.Types 14790 _ = typ 14791 // match: (Lsh16x32 <t> x y) 14792 // cond: 14793 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14794 for { 14795 t := v.Type 14796 _ = v.Args[1] 14797 x := v.Args[0] 14798 y := v.Args[1] 14799 v.reset(OpARM64CSEL) 14800 v.Aux = OpARM64LessThanU 14801 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14802 v0.AddArg(x) 14803 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14804 v1.AddArg(y) 14805 v0.AddArg(v1) 14806 v.AddArg(v0) 14807 v2 := b.NewValue0(v.Pos, OpConst64, t) 14808 v2.AuxInt = 0 14809 v.AddArg(v2) 14810 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14811 v3.AuxInt = 64 14812 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14813 v4.AddArg(y) 14814 v3.AddArg(v4) 14815 v.AddArg(v3) 14816 return true 14817 } 14818 } 14819 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 14820 b := v.Block 14821 _ = b 14822 // match: (Lsh16x64 <t> x y) 14823 // cond: 14824 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 14825 for { 14826 t := v.Type 14827 _ = v.Args[1] 14828 x := v.Args[0] 14829 y := v.Args[1] 14830 v.reset(OpARM64CSEL) 14831 v.Aux = OpARM64LessThanU 14832 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14833 v0.AddArg(x) 14834 v0.AddArg(y) 14835 v.AddArg(v0) 14836 v1 := b.NewValue0(v.Pos, OpConst64, t) 14837 
v1.AuxInt = 0 14838 v.AddArg(v1) 14839 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14840 v2.AuxInt = 64 14841 v2.AddArg(y) 14842 v.AddArg(v2) 14843 return true 14844 } 14845 } 14846 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 14847 b := v.Block 14848 _ = b 14849 typ := &b.Func.Config.Types 14850 _ = typ 14851 // match: (Lsh16x8 <t> x y) 14852 // cond: 14853 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14854 for { 14855 t := v.Type 14856 _ = v.Args[1] 14857 x := v.Args[0] 14858 y := v.Args[1] 14859 v.reset(OpARM64CSEL) 14860 v.Aux = OpARM64LessThanU 14861 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14862 v0.AddArg(x) 14863 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14864 v1.AddArg(y) 14865 v0.AddArg(v1) 14866 v.AddArg(v0) 14867 v2 := b.NewValue0(v.Pos, OpConst64, t) 14868 v2.AuxInt = 0 14869 v.AddArg(v2) 14870 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14871 v3.AuxInt = 64 14872 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14873 v4.AddArg(y) 14874 v3.AddArg(v4) 14875 v.AddArg(v3) 14876 return true 14877 } 14878 } 14879 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 14880 b := v.Block 14881 _ = b 14882 typ := &b.Func.Config.Types 14883 _ = typ 14884 // match: (Lsh32x16 <t> x y) 14885 // cond: 14886 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14887 for { 14888 t := v.Type 14889 _ = v.Args[1] 14890 x := v.Args[0] 14891 y := v.Args[1] 14892 v.reset(OpARM64CSEL) 14893 v.Aux = OpARM64LessThanU 14894 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14895 v0.AddArg(x) 14896 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14897 v1.AddArg(y) 14898 v0.AddArg(v1) 14899 v.AddArg(v0) 14900 v2 := b.NewValue0(v.Pos, OpConst64, t) 14901 v2.AuxInt = 0 14902 v.AddArg(v2) 14903 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14904 v3.AuxInt = 64 14905 v4 := b.NewValue0(v.Pos, 
OpZeroExt16to64, typ.UInt64) 14906 v4.AddArg(y) 14907 v3.AddArg(v4) 14908 v.AddArg(v3) 14909 return true 14910 } 14911 } 14912 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 14913 b := v.Block 14914 _ = b 14915 typ := &b.Func.Config.Types 14916 _ = typ 14917 // match: (Lsh32x32 <t> x y) 14918 // cond: 14919 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14920 for { 14921 t := v.Type 14922 _ = v.Args[1] 14923 x := v.Args[0] 14924 y := v.Args[1] 14925 v.reset(OpARM64CSEL) 14926 v.Aux = OpARM64LessThanU 14927 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14928 v0.AddArg(x) 14929 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14930 v1.AddArg(y) 14931 v0.AddArg(v1) 14932 v.AddArg(v0) 14933 v2 := b.NewValue0(v.Pos, OpConst64, t) 14934 v2.AuxInt = 0 14935 v.AddArg(v2) 14936 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14937 v3.AuxInt = 64 14938 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14939 v4.AddArg(y) 14940 v3.AddArg(v4) 14941 v.AddArg(v3) 14942 return true 14943 } 14944 } 14945 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 14946 b := v.Block 14947 _ = b 14948 // match: (Lsh32x64 <t> x y) 14949 // cond: 14950 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 14951 for { 14952 t := v.Type 14953 _ = v.Args[1] 14954 x := v.Args[0] 14955 y := v.Args[1] 14956 v.reset(OpARM64CSEL) 14957 v.Aux = OpARM64LessThanU 14958 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14959 v0.AddArg(x) 14960 v0.AddArg(y) 14961 v.AddArg(v0) 14962 v1 := b.NewValue0(v.Pos, OpConst64, t) 14963 v1.AuxInt = 0 14964 v.AddArg(v1) 14965 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14966 v2.AuxInt = 64 14967 v2.AddArg(y) 14968 v.AddArg(v2) 14969 return true 14970 } 14971 } 14972 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 14973 b := v.Block 14974 _ = b 14975 typ := &b.Func.Config.Types 14976 _ = typ 14977 // match: (Lsh32x8 <t> x y) 14978 // 
cond: 14979 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14980 for { 14981 t := v.Type 14982 _ = v.Args[1] 14983 x := v.Args[0] 14984 y := v.Args[1] 14985 v.reset(OpARM64CSEL) 14986 v.Aux = OpARM64LessThanU 14987 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 14988 v0.AddArg(x) 14989 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14990 v1.AddArg(y) 14991 v0.AddArg(v1) 14992 v.AddArg(v0) 14993 v2 := b.NewValue0(v.Pos, OpConst64, t) 14994 v2.AuxInt = 0 14995 v.AddArg(v2) 14996 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14997 v3.AuxInt = 64 14998 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14999 v4.AddArg(y) 15000 v3.AddArg(v4) 15001 v.AddArg(v3) 15002 return true 15003 } 15004 } 15005 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 15006 b := v.Block 15007 _ = b 15008 typ := &b.Func.Config.Types 15009 _ = typ 15010 // match: (Lsh64x16 <t> x y) 15011 // cond: 15012 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 15013 for { 15014 t := v.Type 15015 _ = v.Args[1] 15016 x := v.Args[0] 15017 y := v.Args[1] 15018 v.reset(OpARM64CSEL) 15019 v.Aux = OpARM64LessThanU 15020 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 15021 v0.AddArg(x) 15022 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15023 v1.AddArg(y) 15024 v0.AddArg(v1) 15025 v.AddArg(v0) 15026 v2 := b.NewValue0(v.Pos, OpConst64, t) 15027 v2.AuxInt = 0 15028 v.AddArg(v2) 15029 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15030 v3.AuxInt = 64 15031 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15032 v4.AddArg(y) 15033 v3.AddArg(v4) 15034 v.AddArg(v3) 15035 return true 15036 } 15037 } 15038 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 15039 b := v.Block 15040 _ = b 15041 typ := &b.Func.Config.Types 15042 _ = typ 15043 // match: (Lsh64x32 <t> x y) 15044 // cond: 15045 // result: (CSEL {OpARM64LessThanU} (SLL <t> x 
// (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpMod16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16 x y)
	// cond:
	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpMod16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16u x y)
	// cond:
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpMod32_0(v *Value) bool {
	// match: (Mod32 x y)
	// cond:
	// result: (MODW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMod32u_0(v *Value) bool {
	// match: (Mod32u x y)
	// cond:
	// result: (UMODW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMod64_0(v *Value) bool {
	// match: (Mod64 x y)
	// cond:
	// result: (MOD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMod64u_0(v *Value) bool {
	// match: (Mod64u x y)
	// cond:
	// result: (UMOD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMod8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8 x y)
	// cond:
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpMod8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8u x y)
	// cond:
	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpMove_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [0] _ _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[2]
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// cond:
	// result: (MOVBstore
	// dst (MOVBUload src mem) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// cond:
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [4] dst src mem)
	// cond:
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [8] dst src mem)
	// cond:
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [5] dst src mem)
	// cond:
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// cond:
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// cond:
	// result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// cond:
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}
func rewriteValueARM64_OpMove_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [16] dst src mem)
	// cond:
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] dst src mem)
	// cond:
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 != 0 && s > 8
	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(dst)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = s - s%8
		v1.AddArg(src)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = s - s%8
		v2.AddArg(dst)
		v2.AddArg(src)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice
	// result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		v.AuxInt = 8 * (128 - int64(s/8))
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 24 && s%8 == 0
	// result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 && s%8 == 0) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = s - 8
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpMul16_0(v *Value) bool {
	// match: (Mul16 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul32_0(v *Value) bool {
	// match: (Mul32 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul32F_0(v *Value) bool {
	// match: (Mul32F x y)
	// cond:
	// result: (FMULS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul64_0(v *Value) bool {
	// match: (Mul64 x y)
	// cond:
	// result: (MUL x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul64F_0(v *Value) bool {
	// match: (Mul64F x y)
	// cond:
	// result: (FMULD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul8_0(v *Value) bool {
	// match: (Mul8 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpNeg16_0(v *Value) bool {
	// match: (Neg16 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg32_0(v *Value) bool {
	// match: (Neg32 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
	// match: (Neg32F x)
	// cond:
	// result: (FNEGS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg64_0(v *Value) bool {
	// match: (Neg64 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
	// match: (Neg64F x)
	// cond:
	// result: (FNEGD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg8_0(v *Value) bool {
	// match: (Neg8 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq16 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (NotEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (NotEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (NotEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq8 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpNot_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Not x)
	// cond:
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v.Args[0]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	//
	// result: (MOVDaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
func rewriteValueARM64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOrB_0(v *Value) bool {
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpPopCount16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount16 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpPopCount32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount32 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpPopCount64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount64 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpRound_0(v *Value) bool {
	// match: (Round x)
	// cond:
	// result: (FRINTAD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTAD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpRound32F_0(v *Value) bool {
	// match: (Round32F x)
	// cond:
	// result: (LoweredRound32F x)
	for {
		x := v.Args[0]
		v.reset(OpARM64LoweredRound32F)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpRound64F_0(v *Value) bool {
	// match: (Round64F x)
	// cond:
	// result: (LoweredRound64F x)
	for {
		x := v.Args[0]
		v.reset(OpARM64LoweredRound64F)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos,
OpARM64CMPconst, types.TypeFlags) 16635 v4.AuxInt = 64 16636 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 16637 v5.AddArg(y) 16638 v4.AddArg(v5) 16639 v.AddArg(v4) 16640 return true 16641 } 16642 } 16643 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool { 16644 b := v.Block 16645 _ = b 16646 typ := &b.Func.Config.Types 16647 _ = typ 16648 // match: (Rsh32Ux32 <t> x y) 16649 // cond: 16650 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 16651 for { 16652 t := v.Type 16653 _ = v.Args[1] 16654 x := v.Args[0] 16655 y := v.Args[1] 16656 v.reset(OpARM64CSEL) 16657 v.Aux = OpARM64LessThanU 16658 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16659 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16660 v1.AddArg(x) 16661 v0.AddArg(v1) 16662 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16663 v2.AddArg(y) 16664 v0.AddArg(v2) 16665 v.AddArg(v0) 16666 v3 := b.NewValue0(v.Pos, OpConst64, t) 16667 v3.AuxInt = 0 16668 v.AddArg(v3) 16669 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16670 v4.AuxInt = 64 16671 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16672 v5.AddArg(y) 16673 v4.AddArg(v5) 16674 v.AddArg(v4) 16675 return true 16676 } 16677 } 16678 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 16679 b := v.Block 16680 _ = b 16681 typ := &b.Func.Config.Types 16682 _ = typ 16683 // match: (Rsh32Ux64 <t> x y) 16684 // cond: 16685 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 16686 for { 16687 t := v.Type 16688 _ = v.Args[1] 16689 x := v.Args[0] 16690 y := v.Args[1] 16691 v.reset(OpARM64CSEL) 16692 v.Aux = OpARM64LessThanU 16693 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16694 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16695 v1.AddArg(x) 16696 v0.AddArg(v1) 16697 v0.AddArg(y) 16698 v.AddArg(v0) 16699 v2 := b.NewValue0(v.Pos, OpConst64, t) 16700 v2.AuxInt = 0 16701 v.AddArg(v2) 16702 v3 
:= b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16703 v3.AuxInt = 64 16704 v3.AddArg(y) 16705 v.AddArg(v3) 16706 return true 16707 } 16708 } 16709 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 16710 b := v.Block 16711 _ = b 16712 typ := &b.Func.Config.Types 16713 _ = typ 16714 // match: (Rsh32Ux8 <t> x y) 16715 // cond: 16716 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 16717 for { 16718 t := v.Type 16719 _ = v.Args[1] 16720 x := v.Args[0] 16721 y := v.Args[1] 16722 v.reset(OpARM64CSEL) 16723 v.Aux = OpARM64LessThanU 16724 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16725 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16726 v1.AddArg(x) 16727 v0.AddArg(v1) 16728 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16729 v2.AddArg(y) 16730 v0.AddArg(v2) 16731 v.AddArg(v0) 16732 v3 := b.NewValue0(v.Pos, OpConst64, t) 16733 v3.AuxInt = 0 16734 v.AddArg(v3) 16735 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16736 v4.AuxInt = 64 16737 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16738 v5.AddArg(y) 16739 v4.AddArg(v5) 16740 v.AddArg(v4) 16741 return true 16742 } 16743 } 16744 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 16745 b := v.Block 16746 _ = b 16747 typ := &b.Func.Config.Types 16748 _ = typ 16749 // match: (Rsh32x16 x y) 16750 // cond: 16751 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 16752 for { 16753 _ = v.Args[1] 16754 x := v.Args[0] 16755 y := v.Args[1] 16756 v.reset(OpARM64SRA) 16757 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 16758 v0.AddArg(x) 16759 v.AddArg(v0) 16760 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 16761 v1.Aux = OpARM64LessThanU 16762 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 16763 v2.AddArg(y) 16764 v1.AddArg(v2) 16765 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 16766 v3.AuxInt = 63 
16767 v1.AddArg(v3) 16768 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16769 v4.AuxInt = 64 16770 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 16771 v5.AddArg(y) 16772 v4.AddArg(v5) 16773 v1.AddArg(v4) 16774 v.AddArg(v1) 16775 return true 16776 } 16777 } 16778 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 16779 b := v.Block 16780 _ = b 16781 typ := &b.Func.Config.Types 16782 _ = typ 16783 // match: (Rsh32x32 x y) 16784 // cond: 16785 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 16786 for { 16787 _ = v.Args[1] 16788 x := v.Args[0] 16789 y := v.Args[1] 16790 v.reset(OpARM64SRA) 16791 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 16792 v0.AddArg(x) 16793 v.AddArg(v0) 16794 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 16795 v1.Aux = OpARM64LessThanU 16796 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16797 v2.AddArg(y) 16798 v1.AddArg(v2) 16799 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 16800 v3.AuxInt = 63 16801 v1.AddArg(v3) 16802 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16803 v4.AuxInt = 64 16804 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16805 v5.AddArg(y) 16806 v4.AddArg(v5) 16807 v1.AddArg(v4) 16808 v.AddArg(v1) 16809 return true 16810 } 16811 } 16812 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 16813 b := v.Block 16814 _ = b 16815 typ := &b.Func.Config.Types 16816 _ = typ 16817 // match: (Rsh32x64 x y) 16818 // cond: 16819 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 16820 for { 16821 _ = v.Args[1] 16822 x := v.Args[0] 16823 y := v.Args[1] 16824 v.reset(OpARM64SRA) 16825 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 16826 v0.AddArg(x) 16827 v.AddArg(v0) 16828 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 16829 v1.Aux = OpARM64LessThanU 16830 v1.AddArg(y) 16831 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 
16832 v2.AuxInt = 63 16833 v1.AddArg(v2) 16834 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16835 v3.AuxInt = 64 16836 v3.AddArg(y) 16837 v1.AddArg(v3) 16838 v.AddArg(v1) 16839 return true 16840 } 16841 } 16842 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 16843 b := v.Block 16844 _ = b 16845 typ := &b.Func.Config.Types 16846 _ = typ 16847 // match: (Rsh32x8 x y) 16848 // cond: 16849 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 16850 for { 16851 _ = v.Args[1] 16852 x := v.Args[0] 16853 y := v.Args[1] 16854 v.reset(OpARM64SRA) 16855 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 16856 v0.AddArg(x) 16857 v.AddArg(v0) 16858 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 16859 v1.Aux = OpARM64LessThanU 16860 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16861 v2.AddArg(y) 16862 v1.AddArg(v2) 16863 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 16864 v3.AuxInt = 63 16865 v1.AddArg(v3) 16866 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16867 v4.AuxInt = 64 16868 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16869 v5.AddArg(y) 16870 v4.AddArg(v5) 16871 v1.AddArg(v4) 16872 v.AddArg(v1) 16873 return true 16874 } 16875 } 16876 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 16877 b := v.Block 16878 _ = b 16879 typ := &b.Func.Config.Types 16880 _ = typ 16881 // match: (Rsh64Ux16 <t> x y) 16882 // cond: 16883 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 16884 for { 16885 t := v.Type 16886 _ = v.Args[1] 16887 x := v.Args[0] 16888 y := v.Args[1] 16889 v.reset(OpARM64CSEL) 16890 v.Aux = OpARM64LessThanU 16891 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16892 v0.AddArg(x) 16893 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 16894 v1.AddArg(y) 16895 v0.AddArg(v1) 16896 v.AddArg(v0) 16897 v2 := b.NewValue0(v.Pos, OpConst64, t) 16898 v2.AuxInt = 0 16899 
v.AddArg(v2) 16900 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16901 v3.AuxInt = 64 16902 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 16903 v4.AddArg(y) 16904 v3.AddArg(v4) 16905 v.AddArg(v3) 16906 return true 16907 } 16908 } 16909 func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 16910 b := v.Block 16911 _ = b 16912 typ := &b.Func.Config.Types 16913 _ = typ 16914 // match: (Rsh64Ux32 <t> x y) 16915 // cond: 16916 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 16917 for { 16918 t := v.Type 16919 _ = v.Args[1] 16920 x := v.Args[0] 16921 y := v.Args[1] 16922 v.reset(OpARM64CSEL) 16923 v.Aux = OpARM64LessThanU 16924 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16925 v0.AddArg(x) 16926 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16927 v1.AddArg(y) 16928 v0.AddArg(v1) 16929 v.AddArg(v0) 16930 v2 := b.NewValue0(v.Pos, OpConst64, t) 16931 v2.AuxInt = 0 16932 v.AddArg(v2) 16933 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16934 v3.AuxInt = 64 16935 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 16936 v4.AddArg(y) 16937 v3.AddArg(v4) 16938 v.AddArg(v3) 16939 return true 16940 } 16941 } 16942 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 16943 b := v.Block 16944 _ = b 16945 // match: (Rsh64Ux64 <t> x y) 16946 // cond: 16947 // result: (CSEL {OpARM64LessThanU} (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 16948 for { 16949 t := v.Type 16950 _ = v.Args[1] 16951 x := v.Args[0] 16952 y := v.Args[1] 16953 v.reset(OpARM64CSEL) 16954 v.Aux = OpARM64LessThanU 16955 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16956 v0.AddArg(x) 16957 v0.AddArg(y) 16958 v.AddArg(v0) 16959 v1 := b.NewValue0(v.Pos, OpConst64, t) 16960 v1.AuxInt = 0 16961 v.AddArg(v1) 16962 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16963 v2.AuxInt = 64 16964 v2.AddArg(y) 16965 v.AddArg(v2) 16966 return true 16967 } 16968 } 16969 func rewriteValueARM64_OpRsh64Ux8_0(v 
*Value) bool { 16970 b := v.Block 16971 _ = b 16972 typ := &b.Func.Config.Types 16973 _ = typ 16974 // match: (Rsh64Ux8 <t> x y) 16975 // cond: 16976 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 16977 for { 16978 t := v.Type 16979 _ = v.Args[1] 16980 x := v.Args[0] 16981 y := v.Args[1] 16982 v.reset(OpARM64CSEL) 16983 v.Aux = OpARM64LessThanU 16984 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 16985 v0.AddArg(x) 16986 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16987 v1.AddArg(y) 16988 v0.AddArg(v1) 16989 v.AddArg(v0) 16990 v2 := b.NewValue0(v.Pos, OpConst64, t) 16991 v2.AuxInt = 0 16992 v.AddArg(v2) 16993 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 16994 v3.AuxInt = 64 16995 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 16996 v4.AddArg(y) 16997 v3.AddArg(v4) 16998 v.AddArg(v3) 16999 return true 17000 } 17001 } 17002 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 17003 b := v.Block 17004 _ = b 17005 typ := &b.Func.Config.Types 17006 _ = typ 17007 // match: (Rsh64x16 x y) 17008 // cond: 17009 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 17010 for { 17011 _ = v.Args[1] 17012 x := v.Args[0] 17013 y := v.Args[1] 17014 v.reset(OpARM64SRA) 17015 v.AddArg(x) 17016 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17017 v0.Aux = OpARM64LessThanU 17018 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17019 v1.AddArg(y) 17020 v0.AddArg(v1) 17021 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 17022 v2.AuxInt = 63 17023 v0.AddArg(v2) 17024 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17025 v3.AuxInt = 64 17026 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17027 v4.AddArg(y) 17028 v3.AddArg(v4) 17029 v0.AddArg(v3) 17030 v.AddArg(v0) 17031 return true 17032 } 17033 } 17034 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 17035 b := v.Block 17036 _ = b 17037 typ := 
&b.Func.Config.Types 17038 _ = typ 17039 // match: (Rsh64x32 x y) 17040 // cond: 17041 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 17042 for { 17043 _ = v.Args[1] 17044 x := v.Args[0] 17045 y := v.Args[1] 17046 v.reset(OpARM64SRA) 17047 v.AddArg(x) 17048 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17049 v0.Aux = OpARM64LessThanU 17050 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17051 v1.AddArg(y) 17052 v0.AddArg(v1) 17053 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 17054 v2.AuxInt = 63 17055 v0.AddArg(v2) 17056 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17057 v3.AuxInt = 64 17058 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17059 v4.AddArg(y) 17060 v3.AddArg(v4) 17061 v0.AddArg(v3) 17062 v.AddArg(v0) 17063 return true 17064 } 17065 } 17066 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 17067 b := v.Block 17068 _ = b 17069 // match: (Rsh64x64 x y) 17070 // cond: 17071 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 17072 for { 17073 _ = v.Args[1] 17074 x := v.Args[0] 17075 y := v.Args[1] 17076 v.reset(OpARM64SRA) 17077 v.AddArg(x) 17078 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17079 v0.Aux = OpARM64LessThanU 17080 v0.AddArg(y) 17081 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 17082 v1.AuxInt = 63 17083 v0.AddArg(v1) 17084 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17085 v2.AuxInt = 64 17086 v2.AddArg(y) 17087 v0.AddArg(v2) 17088 v.AddArg(v0) 17089 return true 17090 } 17091 } 17092 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 17093 b := v.Block 17094 _ = b 17095 typ := &b.Func.Config.Types 17096 _ = typ 17097 // match: (Rsh64x8 x y) 17098 // cond: 17099 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 17100 for { 17101 _ = v.Args[1] 17102 x := v.Args[0] 17103 y := v.Args[1] 17104 
v.reset(OpARM64SRA) 17105 v.AddArg(x) 17106 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17107 v0.Aux = OpARM64LessThanU 17108 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17109 v1.AddArg(y) 17110 v0.AddArg(v1) 17111 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 17112 v2.AuxInt = 63 17113 v0.AddArg(v2) 17114 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17115 v3.AuxInt = 64 17116 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17117 v4.AddArg(y) 17118 v3.AddArg(v4) 17119 v0.AddArg(v3) 17120 v.AddArg(v0) 17121 return true 17122 } 17123 } 17124 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 17125 b := v.Block 17126 _ = b 17127 typ := &b.Func.Config.Types 17128 _ = typ 17129 // match: (Rsh8Ux16 <t> x y) 17130 // cond: 17131 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 17132 for { 17133 t := v.Type 17134 _ = v.Args[1] 17135 x := v.Args[0] 17136 y := v.Args[1] 17137 v.reset(OpARM64CSEL) 17138 v.Aux = OpARM64LessThanU 17139 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 17140 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17141 v1.AddArg(x) 17142 v0.AddArg(v1) 17143 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17144 v2.AddArg(y) 17145 v0.AddArg(v2) 17146 v.AddArg(v0) 17147 v3 := b.NewValue0(v.Pos, OpConst64, t) 17148 v3.AuxInt = 0 17149 v.AddArg(v3) 17150 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17151 v4.AuxInt = 64 17152 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17153 v5.AddArg(y) 17154 v4.AddArg(v5) 17155 v.AddArg(v4) 17156 return true 17157 } 17158 } 17159 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 17160 b := v.Block 17161 _ = b 17162 typ := &b.Func.Config.Types 17163 _ = typ 17164 // match: (Rsh8Ux32 <t> x y) 17165 // cond: 17166 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 17167 for { 17168 t := v.Type 17169 _ = 
v.Args[1] 17170 x := v.Args[0] 17171 y := v.Args[1] 17172 v.reset(OpARM64CSEL) 17173 v.Aux = OpARM64LessThanU 17174 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 17175 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17176 v1.AddArg(x) 17177 v0.AddArg(v1) 17178 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17179 v2.AddArg(y) 17180 v0.AddArg(v2) 17181 v.AddArg(v0) 17182 v3 := b.NewValue0(v.Pos, OpConst64, t) 17183 v3.AuxInt = 0 17184 v.AddArg(v3) 17185 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17186 v4.AuxInt = 64 17187 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17188 v5.AddArg(y) 17189 v4.AddArg(v5) 17190 v.AddArg(v4) 17191 return true 17192 } 17193 } 17194 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 17195 b := v.Block 17196 _ = b 17197 typ := &b.Func.Config.Types 17198 _ = typ 17199 // match: (Rsh8Ux64 <t> x y) 17200 // cond: 17201 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 17202 for { 17203 t := v.Type 17204 _ = v.Args[1] 17205 x := v.Args[0] 17206 y := v.Args[1] 17207 v.reset(OpARM64CSEL) 17208 v.Aux = OpARM64LessThanU 17209 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 17210 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17211 v1.AddArg(x) 17212 v0.AddArg(v1) 17213 v0.AddArg(y) 17214 v.AddArg(v0) 17215 v2 := b.NewValue0(v.Pos, OpConst64, t) 17216 v2.AuxInt = 0 17217 v.AddArg(v2) 17218 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17219 v3.AuxInt = 64 17220 v3.AddArg(y) 17221 v.AddArg(v3) 17222 return true 17223 } 17224 } 17225 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 17226 b := v.Block 17227 _ = b 17228 typ := &b.Func.Config.Types 17229 _ = typ 17230 // match: (Rsh8Ux8 <t> x y) 17231 // cond: 17232 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 17233 for { 17234 t := v.Type 17235 _ = v.Args[1] 17236 x := v.Args[0] 17237 y := v.Args[1] 17238 
v.reset(OpARM64CSEL) 17239 v.Aux = OpARM64LessThanU 17240 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 17241 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17242 v1.AddArg(x) 17243 v0.AddArg(v1) 17244 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17245 v2.AddArg(y) 17246 v0.AddArg(v2) 17247 v.AddArg(v0) 17248 v3 := b.NewValue0(v.Pos, OpConst64, t) 17249 v3.AuxInt = 0 17250 v.AddArg(v3) 17251 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17252 v4.AuxInt = 64 17253 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17254 v5.AddArg(y) 17255 v4.AddArg(v5) 17256 v.AddArg(v4) 17257 return true 17258 } 17259 } 17260 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 17261 b := v.Block 17262 _ = b 17263 typ := &b.Func.Config.Types 17264 _ = typ 17265 // match: (Rsh8x16 x y) 17266 // cond: 17267 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 17268 for { 17269 _ = v.Args[1] 17270 x := v.Args[0] 17271 y := v.Args[1] 17272 v.reset(OpARM64SRA) 17273 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 17274 v0.AddArg(x) 17275 v.AddArg(v0) 17276 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17277 v1.Aux = OpARM64LessThanU 17278 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17279 v2.AddArg(y) 17280 v1.AddArg(v2) 17281 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 17282 v3.AuxInt = 63 17283 v1.AddArg(v3) 17284 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17285 v4.AuxInt = 64 17286 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 17287 v5.AddArg(y) 17288 v4.AddArg(v5) 17289 v1.AddArg(v4) 17290 v.AddArg(v1) 17291 return true 17292 } 17293 } 17294 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 17295 b := v.Block 17296 _ = b 17297 typ := &b.Func.Config.Types 17298 _ = typ 17299 // match: (Rsh8x32 x y) 17300 // cond: 17301 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) 
(CMPconst [64] (ZeroExt32to64 y)))) 17302 for { 17303 _ = v.Args[1] 17304 x := v.Args[0] 17305 y := v.Args[1] 17306 v.reset(OpARM64SRA) 17307 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 17308 v0.AddArg(x) 17309 v.AddArg(v0) 17310 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17311 v1.Aux = OpARM64LessThanU 17312 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17313 v2.AddArg(y) 17314 v1.AddArg(v2) 17315 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 17316 v3.AuxInt = 63 17317 v1.AddArg(v3) 17318 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17319 v4.AuxInt = 64 17320 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 17321 v5.AddArg(y) 17322 v4.AddArg(v5) 17323 v1.AddArg(v4) 17324 v.AddArg(v1) 17325 return true 17326 } 17327 } 17328 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 17329 b := v.Block 17330 _ = b 17331 typ := &b.Func.Config.Types 17332 _ = typ 17333 // match: (Rsh8x64 x y) 17334 // cond: 17335 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 17336 for { 17337 _ = v.Args[1] 17338 x := v.Args[0] 17339 y := v.Args[1] 17340 v.reset(OpARM64SRA) 17341 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 17342 v0.AddArg(x) 17343 v.AddArg(v0) 17344 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17345 v1.Aux = OpARM64LessThanU 17346 v1.AddArg(y) 17347 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 17348 v2.AuxInt = 63 17349 v1.AddArg(v2) 17350 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17351 v3.AuxInt = 64 17352 v3.AddArg(y) 17353 v1.AddArg(v3) 17354 v.AddArg(v1) 17355 return true 17356 } 17357 } 17358 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 17359 b := v.Block 17360 _ = b 17361 typ := &b.Func.Config.Types 17362 _ = typ 17363 // match: (Rsh8x8 x y) 17364 // cond: 17365 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 17366 for { 17367 _ = v.Args[1] 17368 
x := v.Args[0] 17369 y := v.Args[1] 17370 v.reset(OpARM64SRA) 17371 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 17372 v0.AddArg(x) 17373 v.AddArg(v0) 17374 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 17375 v1.Aux = OpARM64LessThanU 17376 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17377 v2.AddArg(y) 17378 v1.AddArg(v2) 17379 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 17380 v3.AuxInt = 63 17381 v1.AddArg(v3) 17382 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 17383 v4.AuxInt = 64 17384 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 17385 v5.AddArg(y) 17386 v4.AddArg(v5) 17387 v1.AddArg(v4) 17388 v.AddArg(v1) 17389 return true 17390 } 17391 } 17392 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool { 17393 // match: (SignExt16to32 x) 17394 // cond: 17395 // result: (MOVHreg x) 17396 for { 17397 x := v.Args[0] 17398 v.reset(OpARM64MOVHreg) 17399 v.AddArg(x) 17400 return true 17401 } 17402 } 17403 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool { 17404 // match: (SignExt16to64 x) 17405 // cond: 17406 // result: (MOVHreg x) 17407 for { 17408 x := v.Args[0] 17409 v.reset(OpARM64MOVHreg) 17410 v.AddArg(x) 17411 return true 17412 } 17413 } 17414 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool { 17415 // match: (SignExt32to64 x) 17416 // cond: 17417 // result: (MOVWreg x) 17418 for { 17419 x := v.Args[0] 17420 v.reset(OpARM64MOVWreg) 17421 v.AddArg(x) 17422 return true 17423 } 17424 } 17425 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool { 17426 // match: (SignExt8to16 x) 17427 // cond: 17428 // result: (MOVBreg x) 17429 for { 17430 x := v.Args[0] 17431 v.reset(OpARM64MOVBreg) 17432 v.AddArg(x) 17433 return true 17434 } 17435 } 17436 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool { 17437 // match: (SignExt8to32 x) 17438 // cond: 17439 // result: (MOVBreg x) 17440 for { 17441 x := v.Args[0] 17442 v.reset(OpARM64MOVBreg) 17443 v.AddArg(x) 17444 return true 17445 } 17446 } 17447 func 
rewriteValueARM64_OpSignExt8to64_0(v *Value) bool { 17448 // match: (SignExt8to64 x) 17449 // cond: 17450 // result: (MOVBreg x) 17451 for { 17452 x := v.Args[0] 17453 v.reset(OpARM64MOVBreg) 17454 v.AddArg(x) 17455 return true 17456 } 17457 } 17458 func rewriteValueARM64_OpSlicemask_0(v *Value) bool { 17459 b := v.Block 17460 _ = b 17461 // match: (Slicemask <t> x) 17462 // cond: 17463 // result: (SRAconst (NEG <t> x) [63]) 17464 for { 17465 t := v.Type 17466 x := v.Args[0] 17467 v.reset(OpARM64SRAconst) 17468 v.AuxInt = 63 17469 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 17470 v0.AddArg(x) 17471 v.AddArg(v0) 17472 return true 17473 } 17474 } 17475 func rewriteValueARM64_OpSqrt_0(v *Value) bool { 17476 // match: (Sqrt x) 17477 // cond: 17478 // result: (FSQRTD x) 17479 for { 17480 x := v.Args[0] 17481 v.reset(OpARM64FSQRTD) 17482 v.AddArg(x) 17483 return true 17484 } 17485 } 17486 func rewriteValueARM64_OpStaticCall_0(v *Value) bool { 17487 // match: (StaticCall [argwid] {target} mem) 17488 // cond: 17489 // result: (CALLstatic [argwid] {target} mem) 17490 for { 17491 argwid := v.AuxInt 17492 target := v.Aux 17493 mem := v.Args[0] 17494 v.reset(OpARM64CALLstatic) 17495 v.AuxInt = argwid 17496 v.Aux = target 17497 v.AddArg(mem) 17498 return true 17499 } 17500 } 17501 func rewriteValueARM64_OpStore_0(v *Value) bool { 17502 // match: (Store {t} ptr val mem) 17503 // cond: t.(*types.Type).Size() == 1 17504 // result: (MOVBstore ptr val mem) 17505 for { 17506 t := v.Aux 17507 _ = v.Args[2] 17508 ptr := v.Args[0] 17509 val := v.Args[1] 17510 mem := v.Args[2] 17511 if !(t.(*types.Type).Size() == 1) { 17512 break 17513 } 17514 v.reset(OpARM64MOVBstore) 17515 v.AddArg(ptr) 17516 v.AddArg(val) 17517 v.AddArg(mem) 17518 return true 17519 } 17520 // match: (Store {t} ptr val mem) 17521 // cond: t.(*types.Type).Size() == 2 17522 // result: (MOVHstore ptr val mem) 17523 for { 17524 t := v.Aux 17525 _ = v.Args[2] 17526 ptr := v.Args[0] 17527 val := v.Args[1] 17528 mem := v.Args[2] 
17529 if !(t.(*types.Type).Size() == 2) { 17530 break 17531 } 17532 v.reset(OpARM64MOVHstore) 17533 v.AddArg(ptr) 17534 v.AddArg(val) 17535 v.AddArg(mem) 17536 return true 17537 } 17538 // match: (Store {t} ptr val mem) 17539 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 17540 // result: (MOVWstore ptr val mem) 17541 for { 17542 t := v.Aux 17543 _ = v.Args[2] 17544 ptr := v.Args[0] 17545 val := v.Args[1] 17546 mem := v.Args[2] 17547 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 17548 break 17549 } 17550 v.reset(OpARM64MOVWstore) 17551 v.AddArg(ptr) 17552 v.AddArg(val) 17553 v.AddArg(mem) 17554 return true 17555 } 17556 // match: (Store {t} ptr val mem) 17557 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 17558 // result: (MOVDstore ptr val mem) 17559 for { 17560 t := v.Aux 17561 _ = v.Args[2] 17562 ptr := v.Args[0] 17563 val := v.Args[1] 17564 mem := v.Args[2] 17565 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 17566 break 17567 } 17568 v.reset(OpARM64MOVDstore) 17569 v.AddArg(ptr) 17570 v.AddArg(val) 17571 v.AddArg(mem) 17572 return true 17573 } 17574 // match: (Store {t} ptr val mem) 17575 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 17576 // result: (FMOVSstore ptr val mem) 17577 for { 17578 t := v.Aux 17579 _ = v.Args[2] 17580 ptr := v.Args[0] 17581 val := v.Args[1] 17582 mem := v.Args[2] 17583 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 17584 break 17585 } 17586 v.reset(OpARM64FMOVSstore) 17587 v.AddArg(ptr) 17588 v.AddArg(val) 17589 v.AddArg(mem) 17590 return true 17591 } 17592 // match: (Store {t} ptr val mem) 17593 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 17594 // result: (FMOVDstore ptr val mem) 17595 for { 17596 t := v.Aux 17597 _ = v.Args[2] 17598 ptr := v.Args[0] 17599 val := v.Args[1] 17600 mem := v.Args[2] 17601 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 17602 break 17603 } 17604 v.reset(OpARM64FMOVDstore) 
		// (continuation of the preceding store-rewrite function, whose
		// opening lines are above this chunk: it finishes wiring the
		// rewritten store's ptr/val/mem args)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// Sub* lowerings: the generic subtraction ops map one-for-one onto
// ARM64 SUB (integer, all widths) or FSUBS/FSUBD (float). Generated
// from gen/ARM64.rules; each "// match / cond / result" triple below is
// the rule being implemented.
func rewriteValueARM64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Trunc lowers the float round-toward-zero op to ARM64 FRINTZD.
func rewriteValueARM64_OpTrunc_0(v *Value) bool {
	// match: (Trunc x)
	// cond:
	// result: (FRINTZD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTZD)
		v.AddArg(x)
		return true
	}
}

// TruncNNtoM integer truncations are no-ops on ARM64 (the narrow value
// lives in the low bits of the register already), so each one rewrites
// to a plain Copy of its argument.
func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// (declaration continues on the next chunk line)
func
rewriteValueARM64_OpWB_0(v *Value) bool {
	// WB lowers the generic write-barrier op to the ARM64-specific
	// LoweredWB, carrying the barrier function symbol through Aux.
	// match: (WB {fn} destptr srcptr mem)
	// cond:
	// result: (LoweredWB {fn} destptr srcptr mem)
	for {
		fn := v.Aux
		_ = v.Args[2]
		destptr := v.Args[0]
		srcptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredWB)
		v.Aux = fn
		v.AddArg(destptr)
		v.AddArg(srcptr)
		v.AddArg(mem)
		return true
	}
}

// Xor* lowerings: all integer widths map onto the single 64-bit ARM64
// XOR op.
func rewriteValueARM64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpZero_0 handles (Zero [n] ptr mem) for n in
// 0..9: tiny zeroings become a short chained sequence of constant
// stores (MOVB/MOVH/MOVW/MOVDstore of (MOVDconst [0])).
func rewriteValueARM64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] ptr mem)
	// cond:
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [5] ptr mem)
	// cond:
	// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// cond:
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// cond:
	// result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [9] ptr mem)
	// cond:
	// result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 9 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero_10 continues the Zero lowering for sizes
// 10..15 (mixed-width store chains) and 16/32/48/64 (chains of STP,
// each STP storing a zero register pair, i.e. 16 bytes).
func rewriteValueARM64_OpZero_10(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [10] ptr mem)
	// cond:
	// result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 10 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [11] ptr mem)
	// cond:
	// result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 11 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 10
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// cond:
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [13] ptr mem)
	// cond:
	// result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 13 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 12
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [14] ptr mem)
	// cond:
	// result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 14 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 12
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [15] ptr mem)
	// cond:
	// result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
	for {
		if v.AuxInt != 15 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 14
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 12
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AuxInt = 8
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [16] ptr mem)
	// cond:
	// result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 0
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [32] ptr mem)
	// cond:
	// result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 32 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 0
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Zero [48] ptr mem)
	// cond:
	// result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 48 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 32
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 16
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v7.AuxInt = 0
		v5.AddArg(v7)
		v5.AddArg(mem)
		v2.AddArg(v5)
		v.AddArg(v2)
		return true
	}
	// match: (Zero [64] ptr mem)
	// cond:
	// result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))))
	for {
		if v.AuxInt != 64 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 48
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 32
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v5.AuxInt = 16
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v7.AuxInt = 0
		v5.AddArg(v7)
		v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v8.AuxInt = 0
		v8.AddArg(ptr)
		v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v9.AuxInt = 0
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v10.AuxInt = 0
		v8.AddArg(v10)
		v8.AddArg(mem)
		v5.AddArg(v8)
		v2.AddArg(v5)
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero_20 handles the remaining Zero sizes:
// non-multiples of 16 above 16 bytes split off the s%16 remainder and
// recurse; 16-byte multiples use Duff's device (DUFFZERO) when allowed
// and small enough, otherwise the LoweredZero runtime loop.
func rewriteValueARM64_OpZero_20(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s > 16
	// result: (Zero [s-s%16] (OffPtr <ptr.Type> ptr [s%16]) (Zero [s%16] ptr mem))
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 != 0 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = s - s%16
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s % 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s % 16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (64 - int64(s/16))] ptr mem)
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		// AuxInt is the byte offset to jump to inside the Duff's
		// device routine: 4 bytes of code per 16-byte chunk skipped.
		v.AuxInt = 4 * (64 - int64(s/16))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		// Second operand is the address of the last 16-byte chunk.
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// ZeroExt* lowerings: zero extensions map onto the unsigned move ops
// MOVBUreg/MOVHUreg/MOVWUreg according to the source width.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteBlockARM64 rewrites control-flow blocks (this function
// continues beyond the end of this chunk; only its opening rules are
// visible here).
func rewriteBlockARM64(b *Block) bool {
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	typ := &config.Types
	_ = typ
	switch b.Kind {
	case BlockARM64EQ:
		// match: (EQ (CMPconst [0] x) yes no)
		// cond:
		// result: (Z x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64Z
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (CMPWconst [0] x) yes no)
		// cond:
		// result: (ZW x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPWconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64ZW
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
b.SetControl(nil) 18619 b.Aux = nil 18620 return true 18621 } 18622 // match: (EQ (FlagLT_ULT) yes no) 18623 // cond: 18624 // result: (First nil no yes) 18625 for { 18626 v := b.Control 18627 if v.Op != OpARM64FlagLT_ULT { 18628 break 18629 } 18630 b.Kind = BlockFirst 18631 b.SetControl(nil) 18632 b.Aux = nil 18633 b.swapSuccessors() 18634 return true 18635 } 18636 // match: (EQ (FlagLT_UGT) yes no) 18637 // cond: 18638 // result: (First nil no yes) 18639 for { 18640 v := b.Control 18641 if v.Op != OpARM64FlagLT_UGT { 18642 break 18643 } 18644 b.Kind = BlockFirst 18645 b.SetControl(nil) 18646 b.Aux = nil 18647 b.swapSuccessors() 18648 return true 18649 } 18650 // match: (EQ (FlagGT_ULT) yes no) 18651 // cond: 18652 // result: (First nil no yes) 18653 for { 18654 v := b.Control 18655 if v.Op != OpARM64FlagGT_ULT { 18656 break 18657 } 18658 b.Kind = BlockFirst 18659 b.SetControl(nil) 18660 b.Aux = nil 18661 b.swapSuccessors() 18662 return true 18663 } 18664 // match: (EQ (FlagGT_UGT) yes no) 18665 // cond: 18666 // result: (First nil no yes) 18667 for { 18668 v := b.Control 18669 if v.Op != OpARM64FlagGT_UGT { 18670 break 18671 } 18672 b.Kind = BlockFirst 18673 b.SetControl(nil) 18674 b.Aux = nil 18675 b.swapSuccessors() 18676 return true 18677 } 18678 // match: (EQ (InvertFlags cmp) yes no) 18679 // cond: 18680 // result: (EQ cmp yes no) 18681 for { 18682 v := b.Control 18683 if v.Op != OpARM64InvertFlags { 18684 break 18685 } 18686 cmp := v.Args[0] 18687 b.Kind = BlockARM64EQ 18688 b.SetControl(cmp) 18689 b.Aux = nil 18690 return true 18691 } 18692 case BlockARM64GE: 18693 // match: (GE (CMPWconst [0] x) yes no) 18694 // cond: 18695 // result: (TBZ {int64(31)} x yes no) 18696 for { 18697 v := b.Control 18698 if v.Op != OpARM64CMPWconst { 18699 break 18700 } 18701 if v.AuxInt != 0 { 18702 break 18703 } 18704 x := v.Args[0] 18705 b.Kind = BlockARM64TBZ 18706 b.SetControl(x) 18707 b.Aux = int64(31) 18708 return true 18709 } 18710 // match: (GE (CMPconst [0] x) yes 
no) 18711 // cond: 18712 // result: (TBZ {int64(63)} x yes no) 18713 for { 18714 v := b.Control 18715 if v.Op != OpARM64CMPconst { 18716 break 18717 } 18718 if v.AuxInt != 0 { 18719 break 18720 } 18721 x := v.Args[0] 18722 b.Kind = BlockARM64TBZ 18723 b.SetControl(x) 18724 b.Aux = int64(63) 18725 return true 18726 } 18727 // match: (GE (FlagEQ) yes no) 18728 // cond: 18729 // result: (First nil yes no) 18730 for { 18731 v := b.Control 18732 if v.Op != OpARM64FlagEQ { 18733 break 18734 } 18735 b.Kind = BlockFirst 18736 b.SetControl(nil) 18737 b.Aux = nil 18738 return true 18739 } 18740 // match: (GE (FlagLT_ULT) yes no) 18741 // cond: 18742 // result: (First nil no yes) 18743 for { 18744 v := b.Control 18745 if v.Op != OpARM64FlagLT_ULT { 18746 break 18747 } 18748 b.Kind = BlockFirst 18749 b.SetControl(nil) 18750 b.Aux = nil 18751 b.swapSuccessors() 18752 return true 18753 } 18754 // match: (GE (FlagLT_UGT) yes no) 18755 // cond: 18756 // result: (First nil no yes) 18757 for { 18758 v := b.Control 18759 if v.Op != OpARM64FlagLT_UGT { 18760 break 18761 } 18762 b.Kind = BlockFirst 18763 b.SetControl(nil) 18764 b.Aux = nil 18765 b.swapSuccessors() 18766 return true 18767 } 18768 // match: (GE (FlagGT_ULT) yes no) 18769 // cond: 18770 // result: (First nil yes no) 18771 for { 18772 v := b.Control 18773 if v.Op != OpARM64FlagGT_ULT { 18774 break 18775 } 18776 b.Kind = BlockFirst 18777 b.SetControl(nil) 18778 b.Aux = nil 18779 return true 18780 } 18781 // match: (GE (FlagGT_UGT) yes no) 18782 // cond: 18783 // result: (First nil yes no) 18784 for { 18785 v := b.Control 18786 if v.Op != OpARM64FlagGT_UGT { 18787 break 18788 } 18789 b.Kind = BlockFirst 18790 b.SetControl(nil) 18791 b.Aux = nil 18792 return true 18793 } 18794 // match: (GE (InvertFlags cmp) yes no) 18795 // cond: 18796 // result: (LE cmp yes no) 18797 for { 18798 v := b.Control 18799 if v.Op != OpARM64InvertFlags { 18800 break 18801 } 18802 cmp := v.Args[0] 18803 b.Kind = BlockARM64LE 18804 b.SetControl(cmp) 
18805 b.Aux = nil 18806 return true 18807 } 18808 case BlockARM64GT: 18809 // match: (GT (FlagEQ) yes no) 18810 // cond: 18811 // result: (First nil no yes) 18812 for { 18813 v := b.Control 18814 if v.Op != OpARM64FlagEQ { 18815 break 18816 } 18817 b.Kind = BlockFirst 18818 b.SetControl(nil) 18819 b.Aux = nil 18820 b.swapSuccessors() 18821 return true 18822 } 18823 // match: (GT (FlagLT_ULT) yes no) 18824 // cond: 18825 // result: (First nil no yes) 18826 for { 18827 v := b.Control 18828 if v.Op != OpARM64FlagLT_ULT { 18829 break 18830 } 18831 b.Kind = BlockFirst 18832 b.SetControl(nil) 18833 b.Aux = nil 18834 b.swapSuccessors() 18835 return true 18836 } 18837 // match: (GT (FlagLT_UGT) yes no) 18838 // cond: 18839 // result: (First nil no yes) 18840 for { 18841 v := b.Control 18842 if v.Op != OpARM64FlagLT_UGT { 18843 break 18844 } 18845 b.Kind = BlockFirst 18846 b.SetControl(nil) 18847 b.Aux = nil 18848 b.swapSuccessors() 18849 return true 18850 } 18851 // match: (GT (FlagGT_ULT) yes no) 18852 // cond: 18853 // result: (First nil yes no) 18854 for { 18855 v := b.Control 18856 if v.Op != OpARM64FlagGT_ULT { 18857 break 18858 } 18859 b.Kind = BlockFirst 18860 b.SetControl(nil) 18861 b.Aux = nil 18862 return true 18863 } 18864 // match: (GT (FlagGT_UGT) yes no) 18865 // cond: 18866 // result: (First nil yes no) 18867 for { 18868 v := b.Control 18869 if v.Op != OpARM64FlagGT_UGT { 18870 break 18871 } 18872 b.Kind = BlockFirst 18873 b.SetControl(nil) 18874 b.Aux = nil 18875 return true 18876 } 18877 // match: (GT (InvertFlags cmp) yes no) 18878 // cond: 18879 // result: (LT cmp yes no) 18880 for { 18881 v := b.Control 18882 if v.Op != OpARM64InvertFlags { 18883 break 18884 } 18885 cmp := v.Args[0] 18886 b.Kind = BlockARM64LT 18887 b.SetControl(cmp) 18888 b.Aux = nil 18889 return true 18890 } 18891 case BlockIf: 18892 // match: (If (Equal cc) yes no) 18893 // cond: 18894 // result: (EQ cc yes no) 18895 for { 18896 v := b.Control 18897 if v.Op != OpARM64Equal { 18898 
break 18899 } 18900 cc := v.Args[0] 18901 b.Kind = BlockARM64EQ 18902 b.SetControl(cc) 18903 b.Aux = nil 18904 return true 18905 } 18906 // match: (If (NotEqual cc) yes no) 18907 // cond: 18908 // result: (NE cc yes no) 18909 for { 18910 v := b.Control 18911 if v.Op != OpARM64NotEqual { 18912 break 18913 } 18914 cc := v.Args[0] 18915 b.Kind = BlockARM64NE 18916 b.SetControl(cc) 18917 b.Aux = nil 18918 return true 18919 } 18920 // match: (If (LessThan cc) yes no) 18921 // cond: 18922 // result: (LT cc yes no) 18923 for { 18924 v := b.Control 18925 if v.Op != OpARM64LessThan { 18926 break 18927 } 18928 cc := v.Args[0] 18929 b.Kind = BlockARM64LT 18930 b.SetControl(cc) 18931 b.Aux = nil 18932 return true 18933 } 18934 // match: (If (LessThanU cc) yes no) 18935 // cond: 18936 // result: (ULT cc yes no) 18937 for { 18938 v := b.Control 18939 if v.Op != OpARM64LessThanU { 18940 break 18941 } 18942 cc := v.Args[0] 18943 b.Kind = BlockARM64ULT 18944 b.SetControl(cc) 18945 b.Aux = nil 18946 return true 18947 } 18948 // match: (If (LessEqual cc) yes no) 18949 // cond: 18950 // result: (LE cc yes no) 18951 for { 18952 v := b.Control 18953 if v.Op != OpARM64LessEqual { 18954 break 18955 } 18956 cc := v.Args[0] 18957 b.Kind = BlockARM64LE 18958 b.SetControl(cc) 18959 b.Aux = nil 18960 return true 18961 } 18962 // match: (If (LessEqualU cc) yes no) 18963 // cond: 18964 // result: (ULE cc yes no) 18965 for { 18966 v := b.Control 18967 if v.Op != OpARM64LessEqualU { 18968 break 18969 } 18970 cc := v.Args[0] 18971 b.Kind = BlockARM64ULE 18972 b.SetControl(cc) 18973 b.Aux = nil 18974 return true 18975 } 18976 // match: (If (GreaterThan cc) yes no) 18977 // cond: 18978 // result: (GT cc yes no) 18979 for { 18980 v := b.Control 18981 if v.Op != OpARM64GreaterThan { 18982 break 18983 } 18984 cc := v.Args[0] 18985 b.Kind = BlockARM64GT 18986 b.SetControl(cc) 18987 b.Aux = nil 18988 return true 18989 } 18990 // match: (If (GreaterThanU cc) yes no) 18991 // cond: 18992 // result: (UGT cc 
yes no) 18993 for { 18994 v := b.Control 18995 if v.Op != OpARM64GreaterThanU { 18996 break 18997 } 18998 cc := v.Args[0] 18999 b.Kind = BlockARM64UGT 19000 b.SetControl(cc) 19001 b.Aux = nil 19002 return true 19003 } 19004 // match: (If (GreaterEqual cc) yes no) 19005 // cond: 19006 // result: (GE cc yes no) 19007 for { 19008 v := b.Control 19009 if v.Op != OpARM64GreaterEqual { 19010 break 19011 } 19012 cc := v.Args[0] 19013 b.Kind = BlockARM64GE 19014 b.SetControl(cc) 19015 b.Aux = nil 19016 return true 19017 } 19018 // match: (If (GreaterEqualU cc) yes no) 19019 // cond: 19020 // result: (UGE cc yes no) 19021 for { 19022 v := b.Control 19023 if v.Op != OpARM64GreaterEqualU { 19024 break 19025 } 19026 cc := v.Args[0] 19027 b.Kind = BlockARM64UGE 19028 b.SetControl(cc) 19029 b.Aux = nil 19030 return true 19031 } 19032 // match: (If cond yes no) 19033 // cond: 19034 // result: (NZ cond yes no) 19035 for { 19036 v := b.Control 19037 _ = v 19038 cond := b.Control 19039 b.Kind = BlockARM64NZ 19040 b.SetControl(cond) 19041 b.Aux = nil 19042 return true 19043 } 19044 case BlockARM64LE: 19045 // match: (LE (FlagEQ) yes no) 19046 // cond: 19047 // result: (First nil yes no) 19048 for { 19049 v := b.Control 19050 if v.Op != OpARM64FlagEQ { 19051 break 19052 } 19053 b.Kind = BlockFirst 19054 b.SetControl(nil) 19055 b.Aux = nil 19056 return true 19057 } 19058 // match: (LE (FlagLT_ULT) yes no) 19059 // cond: 19060 // result: (First nil yes no) 19061 for { 19062 v := b.Control 19063 if v.Op != OpARM64FlagLT_ULT { 19064 break 19065 } 19066 b.Kind = BlockFirst 19067 b.SetControl(nil) 19068 b.Aux = nil 19069 return true 19070 } 19071 // match: (LE (FlagLT_UGT) yes no) 19072 // cond: 19073 // result: (First nil yes no) 19074 for { 19075 v := b.Control 19076 if v.Op != OpARM64FlagLT_UGT { 19077 break 19078 } 19079 b.Kind = BlockFirst 19080 b.SetControl(nil) 19081 b.Aux = nil 19082 return true 19083 } 19084 // match: (LE (FlagGT_ULT) yes no) 19085 // cond: 19086 // result: (First 
nil no yes) 19087 for { 19088 v := b.Control 19089 if v.Op != OpARM64FlagGT_ULT { 19090 break 19091 } 19092 b.Kind = BlockFirst 19093 b.SetControl(nil) 19094 b.Aux = nil 19095 b.swapSuccessors() 19096 return true 19097 } 19098 // match: (LE (FlagGT_UGT) yes no) 19099 // cond: 19100 // result: (First nil no yes) 19101 for { 19102 v := b.Control 19103 if v.Op != OpARM64FlagGT_UGT { 19104 break 19105 } 19106 b.Kind = BlockFirst 19107 b.SetControl(nil) 19108 b.Aux = nil 19109 b.swapSuccessors() 19110 return true 19111 } 19112 // match: (LE (InvertFlags cmp) yes no) 19113 // cond: 19114 // result: (GE cmp yes no) 19115 for { 19116 v := b.Control 19117 if v.Op != OpARM64InvertFlags { 19118 break 19119 } 19120 cmp := v.Args[0] 19121 b.Kind = BlockARM64GE 19122 b.SetControl(cmp) 19123 b.Aux = nil 19124 return true 19125 } 19126 case BlockARM64LT: 19127 // match: (LT (CMPWconst [0] x) yes no) 19128 // cond: 19129 // result: (TBNZ {int64(31)} x yes no) 19130 for { 19131 v := b.Control 19132 if v.Op != OpARM64CMPWconst { 19133 break 19134 } 19135 if v.AuxInt != 0 { 19136 break 19137 } 19138 x := v.Args[0] 19139 b.Kind = BlockARM64TBNZ 19140 b.SetControl(x) 19141 b.Aux = int64(31) 19142 return true 19143 } 19144 // match: (LT (CMPconst [0] x) yes no) 19145 // cond: 19146 // result: (TBNZ {int64(63)} x yes no) 19147 for { 19148 v := b.Control 19149 if v.Op != OpARM64CMPconst { 19150 break 19151 } 19152 if v.AuxInt != 0 { 19153 break 19154 } 19155 x := v.Args[0] 19156 b.Kind = BlockARM64TBNZ 19157 b.SetControl(x) 19158 b.Aux = int64(63) 19159 return true 19160 } 19161 // match: (LT (FlagEQ) yes no) 19162 // cond: 19163 // result: (First nil no yes) 19164 for { 19165 v := b.Control 19166 if v.Op != OpARM64FlagEQ { 19167 break 19168 } 19169 b.Kind = BlockFirst 19170 b.SetControl(nil) 19171 b.Aux = nil 19172 b.swapSuccessors() 19173 return true 19174 } 19175 // match: (LT (FlagLT_ULT) yes no) 19176 // cond: 19177 // result: (First nil yes no) 19178 for { 19179 v := b.Control 19180 
if v.Op != OpARM64FlagLT_ULT { 19181 break 19182 } 19183 b.Kind = BlockFirst 19184 b.SetControl(nil) 19185 b.Aux = nil 19186 return true 19187 } 19188 // match: (LT (FlagLT_UGT) yes no) 19189 // cond: 19190 // result: (First nil yes no) 19191 for { 19192 v := b.Control 19193 if v.Op != OpARM64FlagLT_UGT { 19194 break 19195 } 19196 b.Kind = BlockFirst 19197 b.SetControl(nil) 19198 b.Aux = nil 19199 return true 19200 } 19201 // match: (LT (FlagGT_ULT) yes no) 19202 // cond: 19203 // result: (First nil no yes) 19204 for { 19205 v := b.Control 19206 if v.Op != OpARM64FlagGT_ULT { 19207 break 19208 } 19209 b.Kind = BlockFirst 19210 b.SetControl(nil) 19211 b.Aux = nil 19212 b.swapSuccessors() 19213 return true 19214 } 19215 // match: (LT (FlagGT_UGT) yes no) 19216 // cond: 19217 // result: (First nil no yes) 19218 for { 19219 v := b.Control 19220 if v.Op != OpARM64FlagGT_UGT { 19221 break 19222 } 19223 b.Kind = BlockFirst 19224 b.SetControl(nil) 19225 b.Aux = nil 19226 b.swapSuccessors() 19227 return true 19228 } 19229 // match: (LT (InvertFlags cmp) yes no) 19230 // cond: 19231 // result: (GT cmp yes no) 19232 for { 19233 v := b.Control 19234 if v.Op != OpARM64InvertFlags { 19235 break 19236 } 19237 cmp := v.Args[0] 19238 b.Kind = BlockARM64GT 19239 b.SetControl(cmp) 19240 b.Aux = nil 19241 return true 19242 } 19243 case BlockARM64NE: 19244 // match: (NE (CMPconst [0] x) yes no) 19245 // cond: 19246 // result: (NZ x yes no) 19247 for { 19248 v := b.Control 19249 if v.Op != OpARM64CMPconst { 19250 break 19251 } 19252 if v.AuxInt != 0 { 19253 break 19254 } 19255 x := v.Args[0] 19256 b.Kind = BlockARM64NZ 19257 b.SetControl(x) 19258 b.Aux = nil 19259 return true 19260 } 19261 // match: (NE (CMPWconst [0] x) yes no) 19262 // cond: 19263 // result: (NZW x yes no) 19264 for { 19265 v := b.Control 19266 if v.Op != OpARM64CMPWconst { 19267 break 19268 } 19269 if v.AuxInt != 0 { 19270 break 19271 } 19272 x := v.Args[0] 19273 b.Kind = BlockARM64NZW 19274 b.SetControl(x) 19275 
b.Aux = nil 19276 return true 19277 } 19278 // match: (NE (FlagEQ) yes no) 19279 // cond: 19280 // result: (First nil no yes) 19281 for { 19282 v := b.Control 19283 if v.Op != OpARM64FlagEQ { 19284 break 19285 } 19286 b.Kind = BlockFirst 19287 b.SetControl(nil) 19288 b.Aux = nil 19289 b.swapSuccessors() 19290 return true 19291 } 19292 // match: (NE (FlagLT_ULT) yes no) 19293 // cond: 19294 // result: (First nil yes no) 19295 for { 19296 v := b.Control 19297 if v.Op != OpARM64FlagLT_ULT { 19298 break 19299 } 19300 b.Kind = BlockFirst 19301 b.SetControl(nil) 19302 b.Aux = nil 19303 return true 19304 } 19305 // match: (NE (FlagLT_UGT) yes no) 19306 // cond: 19307 // result: (First nil yes no) 19308 for { 19309 v := b.Control 19310 if v.Op != OpARM64FlagLT_UGT { 19311 break 19312 } 19313 b.Kind = BlockFirst 19314 b.SetControl(nil) 19315 b.Aux = nil 19316 return true 19317 } 19318 // match: (NE (FlagGT_ULT) yes no) 19319 // cond: 19320 // result: (First nil yes no) 19321 for { 19322 v := b.Control 19323 if v.Op != OpARM64FlagGT_ULT { 19324 break 19325 } 19326 b.Kind = BlockFirst 19327 b.SetControl(nil) 19328 b.Aux = nil 19329 return true 19330 } 19331 // match: (NE (FlagGT_UGT) yes no) 19332 // cond: 19333 // result: (First nil yes no) 19334 for { 19335 v := b.Control 19336 if v.Op != OpARM64FlagGT_UGT { 19337 break 19338 } 19339 b.Kind = BlockFirst 19340 b.SetControl(nil) 19341 b.Aux = nil 19342 return true 19343 } 19344 // match: (NE (InvertFlags cmp) yes no) 19345 // cond: 19346 // result: (NE cmp yes no) 19347 for { 19348 v := b.Control 19349 if v.Op != OpARM64InvertFlags { 19350 break 19351 } 19352 cmp := v.Args[0] 19353 b.Kind = BlockARM64NE 19354 b.SetControl(cmp) 19355 b.Aux = nil 19356 return true 19357 } 19358 case BlockARM64NZ: 19359 // match: (NZ (Equal cc) yes no) 19360 // cond: 19361 // result: (EQ cc yes no) 19362 for { 19363 v := b.Control 19364 if v.Op != OpARM64Equal { 19365 break 19366 } 19367 cc := v.Args[0] 19368 b.Kind = BlockARM64EQ 19369 
b.SetControl(cc) 19370 b.Aux = nil 19371 return true 19372 } 19373 // match: (NZ (NotEqual cc) yes no) 19374 // cond: 19375 // result: (NE cc yes no) 19376 for { 19377 v := b.Control 19378 if v.Op != OpARM64NotEqual { 19379 break 19380 } 19381 cc := v.Args[0] 19382 b.Kind = BlockARM64NE 19383 b.SetControl(cc) 19384 b.Aux = nil 19385 return true 19386 } 19387 // match: (NZ (LessThan cc) yes no) 19388 // cond: 19389 // result: (LT cc yes no) 19390 for { 19391 v := b.Control 19392 if v.Op != OpARM64LessThan { 19393 break 19394 } 19395 cc := v.Args[0] 19396 b.Kind = BlockARM64LT 19397 b.SetControl(cc) 19398 b.Aux = nil 19399 return true 19400 } 19401 // match: (NZ (LessThanU cc) yes no) 19402 // cond: 19403 // result: (ULT cc yes no) 19404 for { 19405 v := b.Control 19406 if v.Op != OpARM64LessThanU { 19407 break 19408 } 19409 cc := v.Args[0] 19410 b.Kind = BlockARM64ULT 19411 b.SetControl(cc) 19412 b.Aux = nil 19413 return true 19414 } 19415 // match: (NZ (LessEqual cc) yes no) 19416 // cond: 19417 // result: (LE cc yes no) 19418 for { 19419 v := b.Control 19420 if v.Op != OpARM64LessEqual { 19421 break 19422 } 19423 cc := v.Args[0] 19424 b.Kind = BlockARM64LE 19425 b.SetControl(cc) 19426 b.Aux = nil 19427 return true 19428 } 19429 // match: (NZ (LessEqualU cc) yes no) 19430 // cond: 19431 // result: (ULE cc yes no) 19432 for { 19433 v := b.Control 19434 if v.Op != OpARM64LessEqualU { 19435 break 19436 } 19437 cc := v.Args[0] 19438 b.Kind = BlockARM64ULE 19439 b.SetControl(cc) 19440 b.Aux = nil 19441 return true 19442 } 19443 // match: (NZ (GreaterThan cc) yes no) 19444 // cond: 19445 // result: (GT cc yes no) 19446 for { 19447 v := b.Control 19448 if v.Op != OpARM64GreaterThan { 19449 break 19450 } 19451 cc := v.Args[0] 19452 b.Kind = BlockARM64GT 19453 b.SetControl(cc) 19454 b.Aux = nil 19455 return true 19456 } 19457 // match: (NZ (GreaterThanU cc) yes no) 19458 // cond: 19459 // result: (UGT cc yes no) 19460 for { 19461 v := b.Control 19462 if v.Op != 
OpARM64GreaterThanU { 19463 break 19464 } 19465 cc := v.Args[0] 19466 b.Kind = BlockARM64UGT 19467 b.SetControl(cc) 19468 b.Aux = nil 19469 return true 19470 } 19471 // match: (NZ (GreaterEqual cc) yes no) 19472 // cond: 19473 // result: (GE cc yes no) 19474 for { 19475 v := b.Control 19476 if v.Op != OpARM64GreaterEqual { 19477 break 19478 } 19479 cc := v.Args[0] 19480 b.Kind = BlockARM64GE 19481 b.SetControl(cc) 19482 b.Aux = nil 19483 return true 19484 } 19485 // match: (NZ (GreaterEqualU cc) yes no) 19486 // cond: 19487 // result: (UGE cc yes no) 19488 for { 19489 v := b.Control 19490 if v.Op != OpARM64GreaterEqualU { 19491 break 19492 } 19493 cc := v.Args[0] 19494 b.Kind = BlockARM64UGE 19495 b.SetControl(cc) 19496 b.Aux = nil 19497 return true 19498 } 19499 // match: (NZ (ANDconst [c] x) yes no) 19500 // cond: oneBit(c) 19501 // result: (TBNZ {ntz(c)} x yes no) 19502 for { 19503 v := b.Control 19504 if v.Op != OpARM64ANDconst { 19505 break 19506 } 19507 c := v.AuxInt 19508 x := v.Args[0] 19509 if !(oneBit(c)) { 19510 break 19511 } 19512 b.Kind = BlockARM64TBNZ 19513 b.SetControl(x) 19514 b.Aux = ntz(c) 19515 return true 19516 } 19517 // match: (NZ (MOVDconst [0]) yes no) 19518 // cond: 19519 // result: (First nil no yes) 19520 for { 19521 v := b.Control 19522 if v.Op != OpARM64MOVDconst { 19523 break 19524 } 19525 if v.AuxInt != 0 { 19526 break 19527 } 19528 b.Kind = BlockFirst 19529 b.SetControl(nil) 19530 b.Aux = nil 19531 b.swapSuccessors() 19532 return true 19533 } 19534 // match: (NZ (MOVDconst [c]) yes no) 19535 // cond: c != 0 19536 // result: (First nil yes no) 19537 for { 19538 v := b.Control 19539 if v.Op != OpARM64MOVDconst { 19540 break 19541 } 19542 c := v.AuxInt 19543 if !(c != 0) { 19544 break 19545 } 19546 b.Kind = BlockFirst 19547 b.SetControl(nil) 19548 b.Aux = nil 19549 return true 19550 } 19551 case BlockARM64NZW: 19552 // match: (NZW (ANDconst [c] x) yes no) 19553 // cond: oneBit(int64(uint32(c))) 19554 // result: (TBNZ 
{ntz(int64(uint32(c)))} x yes no) 19555 for { 19556 v := b.Control 19557 if v.Op != OpARM64ANDconst { 19558 break 19559 } 19560 c := v.AuxInt 19561 x := v.Args[0] 19562 if !(oneBit(int64(uint32(c)))) { 19563 break 19564 } 19565 b.Kind = BlockARM64TBNZ 19566 b.SetControl(x) 19567 b.Aux = ntz(int64(uint32(c))) 19568 return true 19569 } 19570 // match: (NZW (MOVDconst [c]) yes no) 19571 // cond: int32(c) == 0 19572 // result: (First nil no yes) 19573 for { 19574 v := b.Control 19575 if v.Op != OpARM64MOVDconst { 19576 break 19577 } 19578 c := v.AuxInt 19579 if !(int32(c) == 0) { 19580 break 19581 } 19582 b.Kind = BlockFirst 19583 b.SetControl(nil) 19584 b.Aux = nil 19585 b.swapSuccessors() 19586 return true 19587 } 19588 // match: (NZW (MOVDconst [c]) yes no) 19589 // cond: int32(c) != 0 19590 // result: (First nil yes no) 19591 for { 19592 v := b.Control 19593 if v.Op != OpARM64MOVDconst { 19594 break 19595 } 19596 c := v.AuxInt 19597 if !(int32(c) != 0) { 19598 break 19599 } 19600 b.Kind = BlockFirst 19601 b.SetControl(nil) 19602 b.Aux = nil 19603 return true 19604 } 19605 case BlockARM64UGE: 19606 // match: (UGE (FlagEQ) yes no) 19607 // cond: 19608 // result: (First nil yes no) 19609 for { 19610 v := b.Control 19611 if v.Op != OpARM64FlagEQ { 19612 break 19613 } 19614 b.Kind = BlockFirst 19615 b.SetControl(nil) 19616 b.Aux = nil 19617 return true 19618 } 19619 // match: (UGE (FlagLT_ULT) yes no) 19620 // cond: 19621 // result: (First nil no yes) 19622 for { 19623 v := b.Control 19624 if v.Op != OpARM64FlagLT_ULT { 19625 break 19626 } 19627 b.Kind = BlockFirst 19628 b.SetControl(nil) 19629 b.Aux = nil 19630 b.swapSuccessors() 19631 return true 19632 } 19633 // match: (UGE (FlagLT_UGT) yes no) 19634 // cond: 19635 // result: (First nil yes no) 19636 for { 19637 v := b.Control 19638 if v.Op != OpARM64FlagLT_UGT { 19639 break 19640 } 19641 b.Kind = BlockFirst 19642 b.SetControl(nil) 19643 b.Aux = nil 19644 return true 19645 } 19646 // match: (UGE (FlagGT_ULT) yes no) 
19647 // cond: 19648 // result: (First nil no yes) 19649 for { 19650 v := b.Control 19651 if v.Op != OpARM64FlagGT_ULT { 19652 break 19653 } 19654 b.Kind = BlockFirst 19655 b.SetControl(nil) 19656 b.Aux = nil 19657 b.swapSuccessors() 19658 return true 19659 } 19660 // match: (UGE (FlagGT_UGT) yes no) 19661 // cond: 19662 // result: (First nil yes no) 19663 for { 19664 v := b.Control 19665 if v.Op != OpARM64FlagGT_UGT { 19666 break 19667 } 19668 b.Kind = BlockFirst 19669 b.SetControl(nil) 19670 b.Aux = nil 19671 return true 19672 } 19673 // match: (UGE (InvertFlags cmp) yes no) 19674 // cond: 19675 // result: (ULE cmp yes no) 19676 for { 19677 v := b.Control 19678 if v.Op != OpARM64InvertFlags { 19679 break 19680 } 19681 cmp := v.Args[0] 19682 b.Kind = BlockARM64ULE 19683 b.SetControl(cmp) 19684 b.Aux = nil 19685 return true 19686 } 19687 case BlockARM64UGT: 19688 // match: (UGT (FlagEQ) yes no) 19689 // cond: 19690 // result: (First nil no yes) 19691 for { 19692 v := b.Control 19693 if v.Op != OpARM64FlagEQ { 19694 break 19695 } 19696 b.Kind = BlockFirst 19697 b.SetControl(nil) 19698 b.Aux = nil 19699 b.swapSuccessors() 19700 return true 19701 } 19702 // match: (UGT (FlagLT_ULT) yes no) 19703 // cond: 19704 // result: (First nil no yes) 19705 for { 19706 v := b.Control 19707 if v.Op != OpARM64FlagLT_ULT { 19708 break 19709 } 19710 b.Kind = BlockFirst 19711 b.SetControl(nil) 19712 b.Aux = nil 19713 b.swapSuccessors() 19714 return true 19715 } 19716 // match: (UGT (FlagLT_UGT) yes no) 19717 // cond: 19718 // result: (First nil yes no) 19719 for { 19720 v := b.Control 19721 if v.Op != OpARM64FlagLT_UGT { 19722 break 19723 } 19724 b.Kind = BlockFirst 19725 b.SetControl(nil) 19726 b.Aux = nil 19727 return true 19728 } 19729 // match: (UGT (FlagGT_ULT) yes no) 19730 // cond: 19731 // result: (First nil no yes) 19732 for { 19733 v := b.Control 19734 if v.Op != OpARM64FlagGT_ULT { 19735 break 19736 } 19737 b.Kind = BlockFirst 19738 b.SetControl(nil) 19739 b.Aux = nil 19740 
b.swapSuccessors() 19741 return true 19742 } 19743 // match: (UGT (FlagGT_UGT) yes no) 19744 // cond: 19745 // result: (First nil yes no) 19746 for { 19747 v := b.Control 19748 if v.Op != OpARM64FlagGT_UGT { 19749 break 19750 } 19751 b.Kind = BlockFirst 19752 b.SetControl(nil) 19753 b.Aux = nil 19754 return true 19755 } 19756 // match: (UGT (InvertFlags cmp) yes no) 19757 // cond: 19758 // result: (ULT cmp yes no) 19759 for { 19760 v := b.Control 19761 if v.Op != OpARM64InvertFlags { 19762 break 19763 } 19764 cmp := v.Args[0] 19765 b.Kind = BlockARM64ULT 19766 b.SetControl(cmp) 19767 b.Aux = nil 19768 return true 19769 } 19770 case BlockARM64ULE: 19771 // match: (ULE (FlagEQ) yes no) 19772 // cond: 19773 // result: (First nil yes no) 19774 for { 19775 v := b.Control 19776 if v.Op != OpARM64FlagEQ { 19777 break 19778 } 19779 b.Kind = BlockFirst 19780 b.SetControl(nil) 19781 b.Aux = nil 19782 return true 19783 } 19784 // match: (ULE (FlagLT_ULT) yes no) 19785 // cond: 19786 // result: (First nil yes no) 19787 for { 19788 v := b.Control 19789 if v.Op != OpARM64FlagLT_ULT { 19790 break 19791 } 19792 b.Kind = BlockFirst 19793 b.SetControl(nil) 19794 b.Aux = nil 19795 return true 19796 } 19797 // match: (ULE (FlagLT_UGT) yes no) 19798 // cond: 19799 // result: (First nil no yes) 19800 for { 19801 v := b.Control 19802 if v.Op != OpARM64FlagLT_UGT { 19803 break 19804 } 19805 b.Kind = BlockFirst 19806 b.SetControl(nil) 19807 b.Aux = nil 19808 b.swapSuccessors() 19809 return true 19810 } 19811 // match: (ULE (FlagGT_ULT) yes no) 19812 // cond: 19813 // result: (First nil yes no) 19814 for { 19815 v := b.Control 19816 if v.Op != OpARM64FlagGT_ULT { 19817 break 19818 } 19819 b.Kind = BlockFirst 19820 b.SetControl(nil) 19821 b.Aux = nil 19822 return true 19823 } 19824 // match: (ULE (FlagGT_UGT) yes no) 19825 // cond: 19826 // result: (First nil no yes) 19827 for { 19828 v := b.Control 19829 if v.Op != OpARM64FlagGT_UGT { 19830 break 19831 } 19832 b.Kind = BlockFirst 19833 
b.SetControl(nil) 19834 b.Aux = nil 19835 b.swapSuccessors() 19836 return true 19837 } 19838 // match: (ULE (InvertFlags cmp) yes no) 19839 // cond: 19840 // result: (UGE cmp yes no) 19841 for { 19842 v := b.Control 19843 if v.Op != OpARM64InvertFlags { 19844 break 19845 } 19846 cmp := v.Args[0] 19847 b.Kind = BlockARM64UGE 19848 b.SetControl(cmp) 19849 b.Aux = nil 19850 return true 19851 } 19852 case BlockARM64ULT: 19853 // match: (ULT (FlagEQ) yes no) 19854 // cond: 19855 // result: (First nil no yes) 19856 for { 19857 v := b.Control 19858 if v.Op != OpARM64FlagEQ { 19859 break 19860 } 19861 b.Kind = BlockFirst 19862 b.SetControl(nil) 19863 b.Aux = nil 19864 b.swapSuccessors() 19865 return true 19866 } 19867 // match: (ULT (FlagLT_ULT) yes no) 19868 // cond: 19869 // result: (First nil yes no) 19870 for { 19871 v := b.Control 19872 if v.Op != OpARM64FlagLT_ULT { 19873 break 19874 } 19875 b.Kind = BlockFirst 19876 b.SetControl(nil) 19877 b.Aux = nil 19878 return true 19879 } 19880 // match: (ULT (FlagLT_UGT) yes no) 19881 // cond: 19882 // result: (First nil no yes) 19883 for { 19884 v := b.Control 19885 if v.Op != OpARM64FlagLT_UGT { 19886 break 19887 } 19888 b.Kind = BlockFirst 19889 b.SetControl(nil) 19890 b.Aux = nil 19891 b.swapSuccessors() 19892 return true 19893 } 19894 // match: (ULT (FlagGT_ULT) yes no) 19895 // cond: 19896 // result: (First nil yes no) 19897 for { 19898 v := b.Control 19899 if v.Op != OpARM64FlagGT_ULT { 19900 break 19901 } 19902 b.Kind = BlockFirst 19903 b.SetControl(nil) 19904 b.Aux = nil 19905 return true 19906 } 19907 // match: (ULT (FlagGT_UGT) yes no) 19908 // cond: 19909 // result: (First nil no yes) 19910 for { 19911 v := b.Control 19912 if v.Op != OpARM64FlagGT_UGT { 19913 break 19914 } 19915 b.Kind = BlockFirst 19916 b.SetControl(nil) 19917 b.Aux = nil 19918 b.swapSuccessors() 19919 return true 19920 } 19921 // match: (ULT (InvertFlags cmp) yes no) 19922 // cond: 19923 // result: (UGT cmp yes no) 19924 for { 19925 v := 
b.Control 19926 if v.Op != OpARM64InvertFlags { 19927 break 19928 } 19929 cmp := v.Args[0] 19930 b.Kind = BlockARM64UGT 19931 b.SetControl(cmp) 19932 b.Aux = nil 19933 return true 19934 } 19935 case BlockARM64Z: 19936 // match: (Z (ANDconst [c] x) yes no) 19937 // cond: oneBit(c) 19938 // result: (TBZ {ntz(c)} x yes no) 19939 for { 19940 v := b.Control 19941 if v.Op != OpARM64ANDconst { 19942 break 19943 } 19944 c := v.AuxInt 19945 x := v.Args[0] 19946 if !(oneBit(c)) { 19947 break 19948 } 19949 b.Kind = BlockARM64TBZ 19950 b.SetControl(x) 19951 b.Aux = ntz(c) 19952 return true 19953 } 19954 // match: (Z (MOVDconst [0]) yes no) 19955 // cond: 19956 // result: (First nil yes no) 19957 for { 19958 v := b.Control 19959 if v.Op != OpARM64MOVDconst { 19960 break 19961 } 19962 if v.AuxInt != 0 { 19963 break 19964 } 19965 b.Kind = BlockFirst 19966 b.SetControl(nil) 19967 b.Aux = nil 19968 return true 19969 } 19970 // match: (Z (MOVDconst [c]) yes no) 19971 // cond: c != 0 19972 // result: (First nil no yes) 19973 for { 19974 v := b.Control 19975 if v.Op != OpARM64MOVDconst { 19976 break 19977 } 19978 c := v.AuxInt 19979 if !(c != 0) { 19980 break 19981 } 19982 b.Kind = BlockFirst 19983 b.SetControl(nil) 19984 b.Aux = nil 19985 b.swapSuccessors() 19986 return true 19987 } 19988 case BlockARM64ZW: 19989 // match: (ZW (ANDconst [c] x) yes no) 19990 // cond: oneBit(int64(uint32(c))) 19991 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 19992 for { 19993 v := b.Control 19994 if v.Op != OpARM64ANDconst { 19995 break 19996 } 19997 c := v.AuxInt 19998 x := v.Args[0] 19999 if !(oneBit(int64(uint32(c)))) { 20000 break 20001 } 20002 b.Kind = BlockARM64TBZ 20003 b.SetControl(x) 20004 b.Aux = ntz(int64(uint32(c))) 20005 return true 20006 } 20007 // match: (ZW (MOVDconst [c]) yes no) 20008 // cond: int32(c) == 0 20009 // result: (First nil yes no) 20010 for { 20011 v := b.Control 20012 if v.Op != OpARM64MOVDconst { 20013 break 20014 } 20015 c := v.AuxInt 20016 if !(int32(c) == 0) { 
20017 break 20018 } 20019 b.Kind = BlockFirst 20020 b.SetControl(nil) 20021 b.Aux = nil 20022 return true 20023 } 20024 // match: (ZW (MOVDconst [c]) yes no) 20025 // cond: int32(c) != 0 20026 // result: (First nil no yes) 20027 for { 20028 v := b.Control 20029 if v.Op != OpARM64MOVDconst { 20030 break 20031 } 20032 c := v.AuxInt 20033 if !(int32(c) != 0) { 20034 break 20035 } 20036 b.Kind = BlockFirst 20037 b.SetControl(nil) 20038 b.Aux = nil 20039 b.swapSuccessors() 20040 return true 20041 } 20042 } 20043 return false 20044 }