github.com/yukk001/go1.10.8@v0.0.0-20190813125351-6df2d3982e20/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 import "cmd/internal/obj" 8 import "cmd/internal/objabi" 9 import "cmd/compile/internal/types" 10 11 var _ = math.MinInt8 // in case not otherwise used 12 var _ = obj.ANOP // in case not otherwise used 13 var _ = objabi.GOROOT // in case not otherwise used 14 var _ = types.TypeMem // in case not otherwise used 15 16 func rewriteValueARM64(v *Value) bool { 17 switch v.Op { 18 case OpARM64ADD: 19 return rewriteValueARM64_OpARM64ADD_0(v) 20 case OpARM64ADDconst: 21 return rewriteValueARM64_OpARM64ADDconst_0(v) 22 case OpARM64ADDshiftLL: 23 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 24 case OpARM64ADDshiftRA: 25 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 26 case OpARM64ADDshiftRL: 27 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 28 case OpARM64AND: 29 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 30 case OpARM64ANDconst: 31 return rewriteValueARM64_OpARM64ANDconst_0(v) 32 case OpARM64ANDshiftLL: 33 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 34 case OpARM64ANDshiftRA: 35 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 36 case OpARM64ANDshiftRL: 37 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 38 case OpARM64BIC: 39 return rewriteValueARM64_OpARM64BIC_0(v) 40 case OpARM64BICconst: 41 return rewriteValueARM64_OpARM64BICconst_0(v) 42 case OpARM64BICshiftLL: 43 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 44 case OpARM64BICshiftRA: 45 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 46 case OpARM64BICshiftRL: 47 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 48 case OpARM64CMP: 49 return rewriteValueARM64_OpARM64CMP_0(v) 50 case OpARM64CMPW: 51 return rewriteValueARM64_OpARM64CMPW_0(v) 52 case OpARM64CMPWconst: 53 return rewriteValueARM64_OpARM64CMPWconst_0(v) 54 case 
OpARM64CMPconst: 55 return rewriteValueARM64_OpARM64CMPconst_0(v) 56 case OpARM64CMPshiftLL: 57 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 58 case OpARM64CMPshiftRA: 59 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 60 case OpARM64CMPshiftRL: 61 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 62 case OpARM64CSELULT: 63 return rewriteValueARM64_OpARM64CSELULT_0(v) 64 case OpARM64CSELULT0: 65 return rewriteValueARM64_OpARM64CSELULT0_0(v) 66 case OpARM64DIV: 67 return rewriteValueARM64_OpARM64DIV_0(v) 68 case OpARM64DIVW: 69 return rewriteValueARM64_OpARM64DIVW_0(v) 70 case OpARM64Equal: 71 return rewriteValueARM64_OpARM64Equal_0(v) 72 case OpARM64FMOVDload: 73 return rewriteValueARM64_OpARM64FMOVDload_0(v) 74 case OpARM64FMOVDstore: 75 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 76 case OpARM64FMOVSload: 77 return rewriteValueARM64_OpARM64FMOVSload_0(v) 78 case OpARM64FMOVSstore: 79 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 80 case OpARM64GreaterEqual: 81 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 82 case OpARM64GreaterEqualU: 83 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 84 case OpARM64GreaterThan: 85 return rewriteValueARM64_OpARM64GreaterThan_0(v) 86 case OpARM64GreaterThanU: 87 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 88 case OpARM64LessEqual: 89 return rewriteValueARM64_OpARM64LessEqual_0(v) 90 case OpARM64LessEqualU: 91 return rewriteValueARM64_OpARM64LessEqualU_0(v) 92 case OpARM64LessThan: 93 return rewriteValueARM64_OpARM64LessThan_0(v) 94 case OpARM64LessThanU: 95 return rewriteValueARM64_OpARM64LessThanU_0(v) 96 case OpARM64MOD: 97 return rewriteValueARM64_OpARM64MOD_0(v) 98 case OpARM64MODW: 99 return rewriteValueARM64_OpARM64MODW_0(v) 100 case OpARM64MOVBUload: 101 return rewriteValueARM64_OpARM64MOVBUload_0(v) 102 case OpARM64MOVBUreg: 103 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 104 case OpARM64MOVBload: 105 return rewriteValueARM64_OpARM64MOVBload_0(v) 106 case OpARM64MOVBreg: 107 return 
rewriteValueARM64_OpARM64MOVBreg_0(v) 108 case OpARM64MOVBstore: 109 return rewriteValueARM64_OpARM64MOVBstore_0(v) 110 case OpARM64MOVBstorezero: 111 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 112 case OpARM64MOVDload: 113 return rewriteValueARM64_OpARM64MOVDload_0(v) 114 case OpARM64MOVDreg: 115 return rewriteValueARM64_OpARM64MOVDreg_0(v) 116 case OpARM64MOVDstore: 117 return rewriteValueARM64_OpARM64MOVDstore_0(v) 118 case OpARM64MOVDstorezero: 119 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 120 case OpARM64MOVHUload: 121 return rewriteValueARM64_OpARM64MOVHUload_0(v) 122 case OpARM64MOVHUreg: 123 return rewriteValueARM64_OpARM64MOVHUreg_0(v) 124 case OpARM64MOVHload: 125 return rewriteValueARM64_OpARM64MOVHload_0(v) 126 case OpARM64MOVHreg: 127 return rewriteValueARM64_OpARM64MOVHreg_0(v) 128 case OpARM64MOVHstore: 129 return rewriteValueARM64_OpARM64MOVHstore_0(v) 130 case OpARM64MOVHstorezero: 131 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 132 case OpARM64MOVQstorezero: 133 return rewriteValueARM64_OpARM64MOVQstorezero_0(v) 134 case OpARM64MOVWUload: 135 return rewriteValueARM64_OpARM64MOVWUload_0(v) 136 case OpARM64MOVWUreg: 137 return rewriteValueARM64_OpARM64MOVWUreg_0(v) 138 case OpARM64MOVWload: 139 return rewriteValueARM64_OpARM64MOVWload_0(v) 140 case OpARM64MOVWreg: 141 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 142 case OpARM64MOVWstore: 143 return rewriteValueARM64_OpARM64MOVWstore_0(v) 144 case OpARM64MOVWstorezero: 145 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 146 case OpARM64MUL: 147 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 148 case OpARM64MULW: 149 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 150 case OpARM64MVN: 151 return rewriteValueARM64_OpARM64MVN_0(v) 152 case OpARM64NEG: 153 return rewriteValueARM64_OpARM64NEG_0(v) 
154 case OpARM64NotEqual: 155 return rewriteValueARM64_OpARM64NotEqual_0(v) 156 case OpARM64OR: 157 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) 158 case OpARM64ORconst: 159 return rewriteValueARM64_OpARM64ORconst_0(v) 160 case OpARM64ORshiftLL: 161 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) 162 case OpARM64ORshiftRA: 163 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 164 case OpARM64ORshiftRL: 165 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 166 case OpARM64SLL: 167 return rewriteValueARM64_OpARM64SLL_0(v) 168 case OpARM64SLLconst: 169 return rewriteValueARM64_OpARM64SLLconst_0(v) 170 case OpARM64SRA: 171 return rewriteValueARM64_OpARM64SRA_0(v) 172 case OpARM64SRAconst: 173 return rewriteValueARM64_OpARM64SRAconst_0(v) 174 case OpARM64SRL: 175 return rewriteValueARM64_OpARM64SRL_0(v) 176 case OpARM64SRLconst: 177 return rewriteValueARM64_OpARM64SRLconst_0(v) 178 case OpARM64STP: 179 return rewriteValueARM64_OpARM64STP_0(v) 180 case OpARM64SUB: 181 return rewriteValueARM64_OpARM64SUB_0(v) 182 case OpARM64SUBconst: 183 return rewriteValueARM64_OpARM64SUBconst_0(v) 184 case OpARM64SUBshiftLL: 185 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 186 case OpARM64SUBshiftRA: 187 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 188 case OpARM64SUBshiftRL: 189 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 190 case OpARM64UDIV: 191 return rewriteValueARM64_OpARM64UDIV_0(v) 192 case OpARM64UDIVW: 193 return rewriteValueARM64_OpARM64UDIVW_0(v) 194 case OpARM64UMOD: 195 return rewriteValueARM64_OpARM64UMOD_0(v) 196 case OpARM64UMODW: 197 return rewriteValueARM64_OpARM64UMODW_0(v) 198 case OpARM64XOR: 199 return rewriteValueARM64_OpARM64XOR_0(v) 200 case OpARM64XORconst: 201 return rewriteValueARM64_OpARM64XORconst_0(v) 202 case OpARM64XORshiftLL: 203 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 204 case OpARM64XORshiftRA: 205 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 206 case 
OpARM64XORshiftRL: 207 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 208 case OpAdd16: 209 return rewriteValueARM64_OpAdd16_0(v) 210 case OpAdd32: 211 return rewriteValueARM64_OpAdd32_0(v) 212 case OpAdd32F: 213 return rewriteValueARM64_OpAdd32F_0(v) 214 case OpAdd64: 215 return rewriteValueARM64_OpAdd64_0(v) 216 case OpAdd64F: 217 return rewriteValueARM64_OpAdd64F_0(v) 218 case OpAdd8: 219 return rewriteValueARM64_OpAdd8_0(v) 220 case OpAddPtr: 221 return rewriteValueARM64_OpAddPtr_0(v) 222 case OpAddr: 223 return rewriteValueARM64_OpAddr_0(v) 224 case OpAnd16: 225 return rewriteValueARM64_OpAnd16_0(v) 226 case OpAnd32: 227 return rewriteValueARM64_OpAnd32_0(v) 228 case OpAnd64: 229 return rewriteValueARM64_OpAnd64_0(v) 230 case OpAnd8: 231 return rewriteValueARM64_OpAnd8_0(v) 232 case OpAndB: 233 return rewriteValueARM64_OpAndB_0(v) 234 case OpAtomicAdd32: 235 return rewriteValueARM64_OpAtomicAdd32_0(v) 236 case OpAtomicAdd64: 237 return rewriteValueARM64_OpAtomicAdd64_0(v) 238 case OpAtomicAnd8: 239 return rewriteValueARM64_OpAtomicAnd8_0(v) 240 case OpAtomicCompareAndSwap32: 241 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 242 case OpAtomicCompareAndSwap64: 243 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 244 case OpAtomicExchange32: 245 return rewriteValueARM64_OpAtomicExchange32_0(v) 246 case OpAtomicExchange64: 247 return rewriteValueARM64_OpAtomicExchange64_0(v) 248 case OpAtomicLoad32: 249 return rewriteValueARM64_OpAtomicLoad32_0(v) 250 case OpAtomicLoad64: 251 return rewriteValueARM64_OpAtomicLoad64_0(v) 252 case OpAtomicLoadPtr: 253 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 254 case OpAtomicOr8: 255 return rewriteValueARM64_OpAtomicOr8_0(v) 256 case OpAtomicStore32: 257 return rewriteValueARM64_OpAtomicStore32_0(v) 258 case OpAtomicStore64: 259 return rewriteValueARM64_OpAtomicStore64_0(v) 260 case OpAtomicStorePtrNoWB: 261 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 262 case OpAvg64u: 263 return 
rewriteValueARM64_OpAvg64u_0(v) 264 case OpBitLen64: 265 return rewriteValueARM64_OpBitLen64_0(v) 266 case OpBitRev16: 267 return rewriteValueARM64_OpBitRev16_0(v) 268 case OpBitRev32: 269 return rewriteValueARM64_OpBitRev32_0(v) 270 case OpBitRev64: 271 return rewriteValueARM64_OpBitRev64_0(v) 272 case OpBitRev8: 273 return rewriteValueARM64_OpBitRev8_0(v) 274 case OpBswap32: 275 return rewriteValueARM64_OpBswap32_0(v) 276 case OpBswap64: 277 return rewriteValueARM64_OpBswap64_0(v) 278 case OpClosureCall: 279 return rewriteValueARM64_OpClosureCall_0(v) 280 case OpCom16: 281 return rewriteValueARM64_OpCom16_0(v) 282 case OpCom32: 283 return rewriteValueARM64_OpCom32_0(v) 284 case OpCom64: 285 return rewriteValueARM64_OpCom64_0(v) 286 case OpCom8: 287 return rewriteValueARM64_OpCom8_0(v) 288 case OpConst16: 289 return rewriteValueARM64_OpConst16_0(v) 290 case OpConst32: 291 return rewriteValueARM64_OpConst32_0(v) 292 case OpConst32F: 293 return rewriteValueARM64_OpConst32F_0(v) 294 case OpConst64: 295 return rewriteValueARM64_OpConst64_0(v) 296 case OpConst64F: 297 return rewriteValueARM64_OpConst64F_0(v) 298 case OpConst8: 299 return rewriteValueARM64_OpConst8_0(v) 300 case OpConstBool: 301 return rewriteValueARM64_OpConstBool_0(v) 302 case OpConstNil: 303 return rewriteValueARM64_OpConstNil_0(v) 304 case OpConvert: 305 return rewriteValueARM64_OpConvert_0(v) 306 case OpCtz32: 307 return rewriteValueARM64_OpCtz32_0(v) 308 case OpCtz64: 309 return rewriteValueARM64_OpCtz64_0(v) 310 case OpCvt32Fto32: 311 return rewriteValueARM64_OpCvt32Fto32_0(v) 312 case OpCvt32Fto32U: 313 return rewriteValueARM64_OpCvt32Fto32U_0(v) 314 case OpCvt32Fto64: 315 return rewriteValueARM64_OpCvt32Fto64_0(v) 316 case OpCvt32Fto64F: 317 return rewriteValueARM64_OpCvt32Fto64F_0(v) 318 case OpCvt32Fto64U: 319 return rewriteValueARM64_OpCvt32Fto64U_0(v) 320 case OpCvt32Uto32F: 321 return rewriteValueARM64_OpCvt32Uto32F_0(v) 322 case OpCvt32Uto64F: 323 return 
rewriteValueARM64_OpCvt32Uto64F_0(v) 324 case OpCvt32to32F: 325 return rewriteValueARM64_OpCvt32to32F_0(v) 326 case OpCvt32to64F: 327 return rewriteValueARM64_OpCvt32to64F_0(v) 328 case OpCvt64Fto32: 329 return rewriteValueARM64_OpCvt64Fto32_0(v) 330 case OpCvt64Fto32F: 331 return rewriteValueARM64_OpCvt64Fto32F_0(v) 332 case OpCvt64Fto32U: 333 return rewriteValueARM64_OpCvt64Fto32U_0(v) 334 case OpCvt64Fto64: 335 return rewriteValueARM64_OpCvt64Fto64_0(v) 336 case OpCvt64Fto64U: 337 return rewriteValueARM64_OpCvt64Fto64U_0(v) 338 case OpCvt64Uto32F: 339 return rewriteValueARM64_OpCvt64Uto32F_0(v) 340 case OpCvt64Uto64F: 341 return rewriteValueARM64_OpCvt64Uto64F_0(v) 342 case OpCvt64to32F: 343 return rewriteValueARM64_OpCvt64to32F_0(v) 344 case OpCvt64to64F: 345 return rewriteValueARM64_OpCvt64to64F_0(v) 346 case OpDiv16: 347 return rewriteValueARM64_OpDiv16_0(v) 348 case OpDiv16u: 349 return rewriteValueARM64_OpDiv16u_0(v) 350 case OpDiv32: 351 return rewriteValueARM64_OpDiv32_0(v) 352 case OpDiv32F: 353 return rewriteValueARM64_OpDiv32F_0(v) 354 case OpDiv32u: 355 return rewriteValueARM64_OpDiv32u_0(v) 356 case OpDiv64: 357 return rewriteValueARM64_OpDiv64_0(v) 358 case OpDiv64F: 359 return rewriteValueARM64_OpDiv64F_0(v) 360 case OpDiv64u: 361 return rewriteValueARM64_OpDiv64u_0(v) 362 case OpDiv8: 363 return rewriteValueARM64_OpDiv8_0(v) 364 case OpDiv8u: 365 return rewriteValueARM64_OpDiv8u_0(v) 366 case OpEq16: 367 return rewriteValueARM64_OpEq16_0(v) 368 case OpEq32: 369 return rewriteValueARM64_OpEq32_0(v) 370 case OpEq32F: 371 return rewriteValueARM64_OpEq32F_0(v) 372 case OpEq64: 373 return rewriteValueARM64_OpEq64_0(v) 374 case OpEq64F: 375 return rewriteValueARM64_OpEq64F_0(v) 376 case OpEq8: 377 return rewriteValueARM64_OpEq8_0(v) 378 case OpEqB: 379 return rewriteValueARM64_OpEqB_0(v) 380 case OpEqPtr: 381 return rewriteValueARM64_OpEqPtr_0(v) 382 case OpGeq16: 383 return rewriteValueARM64_OpGeq16_0(v) 384 case OpGeq16U: 385 return 
rewriteValueARM64_OpGeq16U_0(v) 386 case OpGeq32: 387 return rewriteValueARM64_OpGeq32_0(v) 388 case OpGeq32F: 389 return rewriteValueARM64_OpGeq32F_0(v) 390 case OpGeq32U: 391 return rewriteValueARM64_OpGeq32U_0(v) 392 case OpGeq64: 393 return rewriteValueARM64_OpGeq64_0(v) 394 case OpGeq64F: 395 return rewriteValueARM64_OpGeq64F_0(v) 396 case OpGeq64U: 397 return rewriteValueARM64_OpGeq64U_0(v) 398 case OpGeq8: 399 return rewriteValueARM64_OpGeq8_0(v) 400 case OpGeq8U: 401 return rewriteValueARM64_OpGeq8U_0(v) 402 case OpGetCallerSP: 403 return rewriteValueARM64_OpGetCallerSP_0(v) 404 case OpGetClosurePtr: 405 return rewriteValueARM64_OpGetClosurePtr_0(v) 406 case OpGreater16: 407 return rewriteValueARM64_OpGreater16_0(v) 408 case OpGreater16U: 409 return rewriteValueARM64_OpGreater16U_0(v) 410 case OpGreater32: 411 return rewriteValueARM64_OpGreater32_0(v) 412 case OpGreater32F: 413 return rewriteValueARM64_OpGreater32F_0(v) 414 case OpGreater32U: 415 return rewriteValueARM64_OpGreater32U_0(v) 416 case OpGreater64: 417 return rewriteValueARM64_OpGreater64_0(v) 418 case OpGreater64F: 419 return rewriteValueARM64_OpGreater64F_0(v) 420 case OpGreater64U: 421 return rewriteValueARM64_OpGreater64U_0(v) 422 case OpGreater8: 423 return rewriteValueARM64_OpGreater8_0(v) 424 case OpGreater8U: 425 return rewriteValueARM64_OpGreater8U_0(v) 426 case OpHmul32: 427 return rewriteValueARM64_OpHmul32_0(v) 428 case OpHmul32u: 429 return rewriteValueARM64_OpHmul32u_0(v) 430 case OpHmul64: 431 return rewriteValueARM64_OpHmul64_0(v) 432 case OpHmul64u: 433 return rewriteValueARM64_OpHmul64u_0(v) 434 case OpInterCall: 435 return rewriteValueARM64_OpInterCall_0(v) 436 case OpIsInBounds: 437 return rewriteValueARM64_OpIsInBounds_0(v) 438 case OpIsNonNil: 439 return rewriteValueARM64_OpIsNonNil_0(v) 440 case OpIsSliceInBounds: 441 return rewriteValueARM64_OpIsSliceInBounds_0(v) 442 case OpLeq16: 443 return rewriteValueARM64_OpLeq16_0(v) 444 case OpLeq16U: 445 return 
rewriteValueARM64_OpLeq16U_0(v) 446 case OpLeq32: 447 return rewriteValueARM64_OpLeq32_0(v) 448 case OpLeq32F: 449 return rewriteValueARM64_OpLeq32F_0(v) 450 case OpLeq32U: 451 return rewriteValueARM64_OpLeq32U_0(v) 452 case OpLeq64: 453 return rewriteValueARM64_OpLeq64_0(v) 454 case OpLeq64F: 455 return rewriteValueARM64_OpLeq64F_0(v) 456 case OpLeq64U: 457 return rewriteValueARM64_OpLeq64U_0(v) 458 case OpLeq8: 459 return rewriteValueARM64_OpLeq8_0(v) 460 case OpLeq8U: 461 return rewriteValueARM64_OpLeq8U_0(v) 462 case OpLess16: 463 return rewriteValueARM64_OpLess16_0(v) 464 case OpLess16U: 465 return rewriteValueARM64_OpLess16U_0(v) 466 case OpLess32: 467 return rewriteValueARM64_OpLess32_0(v) 468 case OpLess32F: 469 return rewriteValueARM64_OpLess32F_0(v) 470 case OpLess32U: 471 return rewriteValueARM64_OpLess32U_0(v) 472 case OpLess64: 473 return rewriteValueARM64_OpLess64_0(v) 474 case OpLess64F: 475 return rewriteValueARM64_OpLess64F_0(v) 476 case OpLess64U: 477 return rewriteValueARM64_OpLess64U_0(v) 478 case OpLess8: 479 return rewriteValueARM64_OpLess8_0(v) 480 case OpLess8U: 481 return rewriteValueARM64_OpLess8U_0(v) 482 case OpLoad: 483 return rewriteValueARM64_OpLoad_0(v) 484 case OpLsh16x16: 485 return rewriteValueARM64_OpLsh16x16_0(v) 486 case OpLsh16x32: 487 return rewriteValueARM64_OpLsh16x32_0(v) 488 case OpLsh16x64: 489 return rewriteValueARM64_OpLsh16x64_0(v) 490 case OpLsh16x8: 491 return rewriteValueARM64_OpLsh16x8_0(v) 492 case OpLsh32x16: 493 return rewriteValueARM64_OpLsh32x16_0(v) 494 case OpLsh32x32: 495 return rewriteValueARM64_OpLsh32x32_0(v) 496 case OpLsh32x64: 497 return rewriteValueARM64_OpLsh32x64_0(v) 498 case OpLsh32x8: 499 return rewriteValueARM64_OpLsh32x8_0(v) 500 case OpLsh64x16: 501 return rewriteValueARM64_OpLsh64x16_0(v) 502 case OpLsh64x32: 503 return rewriteValueARM64_OpLsh64x32_0(v) 504 case OpLsh64x64: 505 return rewriteValueARM64_OpLsh64x64_0(v) 506 case OpLsh64x8: 507 return rewriteValueARM64_OpLsh64x8_0(v) 508 case 
OpLsh8x16: 509 return rewriteValueARM64_OpLsh8x16_0(v) 510 case OpLsh8x32: 511 return rewriteValueARM64_OpLsh8x32_0(v) 512 case OpLsh8x64: 513 return rewriteValueARM64_OpLsh8x64_0(v) 514 case OpLsh8x8: 515 return rewriteValueARM64_OpLsh8x8_0(v) 516 case OpMod16: 517 return rewriteValueARM64_OpMod16_0(v) 518 case OpMod16u: 519 return rewriteValueARM64_OpMod16u_0(v) 520 case OpMod32: 521 return rewriteValueARM64_OpMod32_0(v) 522 case OpMod32u: 523 return rewriteValueARM64_OpMod32u_0(v) 524 case OpMod64: 525 return rewriteValueARM64_OpMod64_0(v) 526 case OpMod64u: 527 return rewriteValueARM64_OpMod64u_0(v) 528 case OpMod8: 529 return rewriteValueARM64_OpMod8_0(v) 530 case OpMod8u: 531 return rewriteValueARM64_OpMod8u_0(v) 532 case OpMove: 533 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 534 case OpMul16: 535 return rewriteValueARM64_OpMul16_0(v) 536 case OpMul32: 537 return rewriteValueARM64_OpMul32_0(v) 538 case OpMul32F: 539 return rewriteValueARM64_OpMul32F_0(v) 540 case OpMul64: 541 return rewriteValueARM64_OpMul64_0(v) 542 case OpMul64F: 543 return rewriteValueARM64_OpMul64F_0(v) 544 case OpMul8: 545 return rewriteValueARM64_OpMul8_0(v) 546 case OpNeg16: 547 return rewriteValueARM64_OpNeg16_0(v) 548 case OpNeg32: 549 return rewriteValueARM64_OpNeg32_0(v) 550 case OpNeg32F: 551 return rewriteValueARM64_OpNeg32F_0(v) 552 case OpNeg64: 553 return rewriteValueARM64_OpNeg64_0(v) 554 case OpNeg64F: 555 return rewriteValueARM64_OpNeg64F_0(v) 556 case OpNeg8: 557 return rewriteValueARM64_OpNeg8_0(v) 558 case OpNeq16: 559 return rewriteValueARM64_OpNeq16_0(v) 560 case OpNeq32: 561 return rewriteValueARM64_OpNeq32_0(v) 562 case OpNeq32F: 563 return rewriteValueARM64_OpNeq32F_0(v) 564 case OpNeq64: 565 return rewriteValueARM64_OpNeq64_0(v) 566 case OpNeq64F: 567 return rewriteValueARM64_OpNeq64F_0(v) 568 case OpNeq8: 569 return rewriteValueARM64_OpNeq8_0(v) 570 case OpNeqB: 571 return rewriteValueARM64_OpNeqB_0(v) 572 case OpNeqPtr: 573 return 
rewriteValueARM64_OpNeqPtr_0(v) 574 case OpNilCheck: 575 return rewriteValueARM64_OpNilCheck_0(v) 576 case OpNot: 577 return rewriteValueARM64_OpNot_0(v) 578 case OpOffPtr: 579 return rewriteValueARM64_OpOffPtr_0(v) 580 case OpOr16: 581 return rewriteValueARM64_OpOr16_0(v) 582 case OpOr32: 583 return rewriteValueARM64_OpOr32_0(v) 584 case OpOr64: 585 return rewriteValueARM64_OpOr64_0(v) 586 case OpOr8: 587 return rewriteValueARM64_OpOr8_0(v) 588 case OpOrB: 589 return rewriteValueARM64_OpOrB_0(v) 590 case OpRound32F: 591 return rewriteValueARM64_OpRound32F_0(v) 592 case OpRound64F: 593 return rewriteValueARM64_OpRound64F_0(v) 594 case OpRsh16Ux16: 595 return rewriteValueARM64_OpRsh16Ux16_0(v) 596 case OpRsh16Ux32: 597 return rewriteValueARM64_OpRsh16Ux32_0(v) 598 case OpRsh16Ux64: 599 return rewriteValueARM64_OpRsh16Ux64_0(v) 600 case OpRsh16Ux8: 601 return rewriteValueARM64_OpRsh16Ux8_0(v) 602 case OpRsh16x16: 603 return rewriteValueARM64_OpRsh16x16_0(v) 604 case OpRsh16x32: 605 return rewriteValueARM64_OpRsh16x32_0(v) 606 case OpRsh16x64: 607 return rewriteValueARM64_OpRsh16x64_0(v) 608 case OpRsh16x8: 609 return rewriteValueARM64_OpRsh16x8_0(v) 610 case OpRsh32Ux16: 611 return rewriteValueARM64_OpRsh32Ux16_0(v) 612 case OpRsh32Ux32: 613 return rewriteValueARM64_OpRsh32Ux32_0(v) 614 case OpRsh32Ux64: 615 return rewriteValueARM64_OpRsh32Ux64_0(v) 616 case OpRsh32Ux8: 617 return rewriteValueARM64_OpRsh32Ux8_0(v) 618 case OpRsh32x16: 619 return rewriteValueARM64_OpRsh32x16_0(v) 620 case OpRsh32x32: 621 return rewriteValueARM64_OpRsh32x32_0(v) 622 case OpRsh32x64: 623 return rewriteValueARM64_OpRsh32x64_0(v) 624 case OpRsh32x8: 625 return rewriteValueARM64_OpRsh32x8_0(v) 626 case OpRsh64Ux16: 627 return rewriteValueARM64_OpRsh64Ux16_0(v) 628 case OpRsh64Ux32: 629 return rewriteValueARM64_OpRsh64Ux32_0(v) 630 case OpRsh64Ux64: 631 return rewriteValueARM64_OpRsh64Ux64_0(v) 632 case OpRsh64Ux8: 633 return rewriteValueARM64_OpRsh64Ux8_0(v) 634 case OpRsh64x16: 635 return 
rewriteValueARM64_OpRsh64x16_0(v) 636 case OpRsh64x32: 637 return rewriteValueARM64_OpRsh64x32_0(v) 638 case OpRsh64x64: 639 return rewriteValueARM64_OpRsh64x64_0(v) 640 case OpRsh64x8: 641 return rewriteValueARM64_OpRsh64x8_0(v) 642 case OpRsh8Ux16: 643 return rewriteValueARM64_OpRsh8Ux16_0(v) 644 case OpRsh8Ux32: 645 return rewriteValueARM64_OpRsh8Ux32_0(v) 646 case OpRsh8Ux64: 647 return rewriteValueARM64_OpRsh8Ux64_0(v) 648 case OpRsh8Ux8: 649 return rewriteValueARM64_OpRsh8Ux8_0(v) 650 case OpRsh8x16: 651 return rewriteValueARM64_OpRsh8x16_0(v) 652 case OpRsh8x32: 653 return rewriteValueARM64_OpRsh8x32_0(v) 654 case OpRsh8x64: 655 return rewriteValueARM64_OpRsh8x64_0(v) 656 case OpRsh8x8: 657 return rewriteValueARM64_OpRsh8x8_0(v) 658 case OpSignExt16to32: 659 return rewriteValueARM64_OpSignExt16to32_0(v) 660 case OpSignExt16to64: 661 return rewriteValueARM64_OpSignExt16to64_0(v) 662 case OpSignExt32to64: 663 return rewriteValueARM64_OpSignExt32to64_0(v) 664 case OpSignExt8to16: 665 return rewriteValueARM64_OpSignExt8to16_0(v) 666 case OpSignExt8to32: 667 return rewriteValueARM64_OpSignExt8to32_0(v) 668 case OpSignExt8to64: 669 return rewriteValueARM64_OpSignExt8to64_0(v) 670 case OpSlicemask: 671 return rewriteValueARM64_OpSlicemask_0(v) 672 case OpSqrt: 673 return rewriteValueARM64_OpSqrt_0(v) 674 case OpStaticCall: 675 return rewriteValueARM64_OpStaticCall_0(v) 676 case OpStore: 677 return rewriteValueARM64_OpStore_0(v) 678 case OpSub16: 679 return rewriteValueARM64_OpSub16_0(v) 680 case OpSub32: 681 return rewriteValueARM64_OpSub32_0(v) 682 case OpSub32F: 683 return rewriteValueARM64_OpSub32F_0(v) 684 case OpSub64: 685 return rewriteValueARM64_OpSub64_0(v) 686 case OpSub64F: 687 return rewriteValueARM64_OpSub64F_0(v) 688 case OpSub8: 689 return rewriteValueARM64_OpSub8_0(v) 690 case OpSubPtr: 691 return rewriteValueARM64_OpSubPtr_0(v) 692 case OpTrunc16to8: 693 return rewriteValueARM64_OpTrunc16to8_0(v) 694 case OpTrunc32to16: 695 return 
rewriteValueARM64_OpTrunc32to16_0(v) 696 case OpTrunc32to8: 697 return rewriteValueARM64_OpTrunc32to8_0(v) 698 case OpTrunc64to16: 699 return rewriteValueARM64_OpTrunc64to16_0(v) 700 case OpTrunc64to32: 701 return rewriteValueARM64_OpTrunc64to32_0(v) 702 case OpTrunc64to8: 703 return rewriteValueARM64_OpTrunc64to8_0(v) 704 case OpXor16: 705 return rewriteValueARM64_OpXor16_0(v) 706 case OpXor32: 707 return rewriteValueARM64_OpXor32_0(v) 708 case OpXor64: 709 return rewriteValueARM64_OpXor64_0(v) 710 case OpXor8: 711 return rewriteValueARM64_OpXor8_0(v) 712 case OpZero: 713 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 714 case OpZeroExt16to32: 715 return rewriteValueARM64_OpZeroExt16to32_0(v) 716 case OpZeroExt16to64: 717 return rewriteValueARM64_OpZeroExt16to64_0(v) 718 case OpZeroExt32to64: 719 return rewriteValueARM64_OpZeroExt32to64_0(v) 720 case OpZeroExt8to16: 721 return rewriteValueARM64_OpZeroExt8to16_0(v) 722 case OpZeroExt8to32: 723 return rewriteValueARM64_OpZeroExt8to32_0(v) 724 case OpZeroExt8to64: 725 return rewriteValueARM64_OpZeroExt8to64_0(v) 726 } 727 return false 728 } 729 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 730 // match: (ADD x (MOVDconst [c])) 731 // cond: 732 // result: (ADDconst [c] x) 733 for { 734 _ = v.Args[1] 735 x := v.Args[0] 736 v_1 := v.Args[1] 737 if v_1.Op != OpARM64MOVDconst { 738 break 739 } 740 c := v_1.AuxInt 741 v.reset(OpARM64ADDconst) 742 v.AuxInt = c 743 v.AddArg(x) 744 return true 745 } 746 // match: (ADD (MOVDconst [c]) x) 747 // cond: 748 // result: (ADDconst [c] x) 749 for { 750 _ = v.Args[1] 751 v_0 := v.Args[0] 752 if v_0.Op != OpARM64MOVDconst { 753 break 754 } 755 c := v_0.AuxInt 756 x := v.Args[1] 757 v.reset(OpARM64ADDconst) 758 v.AuxInt = c 759 v.AddArg(x) 760 return true 761 } 762 // match: (ADD x (NEG y)) 763 // cond: 764 // result: (SUB x y) 765 for { 766 _ = v.Args[1] 767 x := v.Args[0] 768 v_1 := v.Args[1] 769 if v_1.Op != OpARM64NEG 
{ 770 break 771 } 772 y := v_1.Args[0] 773 v.reset(OpARM64SUB) 774 v.AddArg(x) 775 v.AddArg(y) 776 return true 777 } 778 // match: (ADD (NEG y) x) 779 // cond: 780 // result: (SUB x y) 781 for { 782 _ = v.Args[1] 783 v_0 := v.Args[0] 784 if v_0.Op != OpARM64NEG { 785 break 786 } 787 y := v_0.Args[0] 788 x := v.Args[1] 789 v.reset(OpARM64SUB) 790 v.AddArg(x) 791 v.AddArg(y) 792 return true 793 } 794 // match: (ADD x (SLLconst [c] y)) 795 // cond: 796 // result: (ADDshiftLL x y [c]) 797 for { 798 _ = v.Args[1] 799 x := v.Args[0] 800 v_1 := v.Args[1] 801 if v_1.Op != OpARM64SLLconst { 802 break 803 } 804 c := v_1.AuxInt 805 y := v_1.Args[0] 806 v.reset(OpARM64ADDshiftLL) 807 v.AuxInt = c 808 v.AddArg(x) 809 v.AddArg(y) 810 return true 811 } 812 // match: (ADD (SLLconst [c] y) x) 813 // cond: 814 // result: (ADDshiftLL x y [c]) 815 for { 816 _ = v.Args[1] 817 v_0 := v.Args[0] 818 if v_0.Op != OpARM64SLLconst { 819 break 820 } 821 c := v_0.AuxInt 822 y := v_0.Args[0] 823 x := v.Args[1] 824 v.reset(OpARM64ADDshiftLL) 825 v.AuxInt = c 826 v.AddArg(x) 827 v.AddArg(y) 828 return true 829 } 830 // match: (ADD x (SRLconst [c] y)) 831 // cond: 832 // result: (ADDshiftRL x y [c]) 833 for { 834 _ = v.Args[1] 835 x := v.Args[0] 836 v_1 := v.Args[1] 837 if v_1.Op != OpARM64SRLconst { 838 break 839 } 840 c := v_1.AuxInt 841 y := v_1.Args[0] 842 v.reset(OpARM64ADDshiftRL) 843 v.AuxInt = c 844 v.AddArg(x) 845 v.AddArg(y) 846 return true 847 } 848 // match: (ADD (SRLconst [c] y) x) 849 // cond: 850 // result: (ADDshiftRL x y [c]) 851 for { 852 _ = v.Args[1] 853 v_0 := v.Args[0] 854 if v_0.Op != OpARM64SRLconst { 855 break 856 } 857 c := v_0.AuxInt 858 y := v_0.Args[0] 859 x := v.Args[1] 860 v.reset(OpARM64ADDshiftRL) 861 v.AuxInt = c 862 v.AddArg(x) 863 v.AddArg(y) 864 return true 865 } 866 // match: (ADD x (SRAconst [c] y)) 867 // cond: 868 // result: (ADDshiftRA x y [c]) 869 for { 870 _ = v.Args[1] 871 x := v.Args[0] 872 v_1 := v.Args[1] 873 if v_1.Op != OpARM64SRAconst { 874 break 
875 } 876 c := v_1.AuxInt 877 y := v_1.Args[0] 878 v.reset(OpARM64ADDshiftRA) 879 v.AuxInt = c 880 v.AddArg(x) 881 v.AddArg(y) 882 return true 883 } 884 // match: (ADD (SRAconst [c] y) x) 885 // cond: 886 // result: (ADDshiftRA x y [c]) 887 for { 888 _ = v.Args[1] 889 v_0 := v.Args[0] 890 if v_0.Op != OpARM64SRAconst { 891 break 892 } 893 c := v_0.AuxInt 894 y := v_0.Args[0] 895 x := v.Args[1] 896 v.reset(OpARM64ADDshiftRA) 897 v.AuxInt = c 898 v.AddArg(x) 899 v.AddArg(y) 900 return true 901 } 902 return false 903 } 904 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 905 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 906 // cond: 907 // result: (MOVDaddr [off1+off2] {sym} ptr) 908 for { 909 off1 := v.AuxInt 910 v_0 := v.Args[0] 911 if v_0.Op != OpARM64MOVDaddr { 912 break 913 } 914 off2 := v_0.AuxInt 915 sym := v_0.Aux 916 ptr := v_0.Args[0] 917 v.reset(OpARM64MOVDaddr) 918 v.AuxInt = off1 + off2 919 v.Aux = sym 920 v.AddArg(ptr) 921 return true 922 } 923 // match: (ADDconst [0] x) 924 // cond: 925 // result: x 926 for { 927 if v.AuxInt != 0 { 928 break 929 } 930 x := v.Args[0] 931 v.reset(OpCopy) 932 v.Type = x.Type 933 v.AddArg(x) 934 return true 935 } 936 // match: (ADDconst [c] (MOVDconst [d])) 937 // cond: 938 // result: (MOVDconst [c+d]) 939 for { 940 c := v.AuxInt 941 v_0 := v.Args[0] 942 if v_0.Op != OpARM64MOVDconst { 943 break 944 } 945 d := v_0.AuxInt 946 v.reset(OpARM64MOVDconst) 947 v.AuxInt = c + d 948 return true 949 } 950 // match: (ADDconst [c] (ADDconst [d] x)) 951 // cond: 952 // result: (ADDconst [c+d] x) 953 for { 954 c := v.AuxInt 955 v_0 := v.Args[0] 956 if v_0.Op != OpARM64ADDconst { 957 break 958 } 959 d := v_0.AuxInt 960 x := v_0.Args[0] 961 v.reset(OpARM64ADDconst) 962 v.AuxInt = c + d 963 v.AddArg(x) 964 return true 965 } 966 // match: (ADDconst [c] (SUBconst [d] x)) 967 // cond: 968 // result: (ADDconst [c-d] x) 969 for { 970 c := v.AuxInt 971 v_0 := v.Args[0] 972 if v_0.Op != OpARM64SUBconst { 973 break 974 } 975 d 
:= v_0.AuxInt 976 x := v_0.Args[0] 977 v.reset(OpARM64ADDconst) 978 v.AuxInt = c - d 979 v.AddArg(x) 980 return true 981 } 982 return false 983 } 984 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 985 b := v.Block 986 _ = b 987 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 988 // cond: 989 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 990 for { 991 d := v.AuxInt 992 _ = v.Args[1] 993 v_0 := v.Args[0] 994 if v_0.Op != OpARM64MOVDconst { 995 break 996 } 997 c := v_0.AuxInt 998 x := v.Args[1] 999 v.reset(OpARM64ADDconst) 1000 v.AuxInt = c 1001 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1002 v0.AuxInt = d 1003 v0.AddArg(x) 1004 v.AddArg(v0) 1005 return true 1006 } 1007 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 1008 // cond: 1009 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 1010 for { 1011 d := v.AuxInt 1012 _ = v.Args[1] 1013 x := v.Args[0] 1014 v_1 := v.Args[1] 1015 if v_1.Op != OpARM64MOVDconst { 1016 break 1017 } 1018 c := v_1.AuxInt 1019 v.reset(OpARM64ADDconst) 1020 v.AuxInt = int64(uint64(c) << uint64(d)) 1021 v.AddArg(x) 1022 return true 1023 } 1024 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 1025 // cond: 1026 // result: (RORconst [64-c] x) 1027 for { 1028 c := v.AuxInt 1029 _ = v.Args[1] 1030 v_0 := v.Args[0] 1031 if v_0.Op != OpARM64SRLconst { 1032 break 1033 } 1034 if v_0.AuxInt != 64-c { 1035 break 1036 } 1037 x := v_0.Args[0] 1038 if x != v.Args[1] { 1039 break 1040 } 1041 v.reset(OpARM64RORconst) 1042 v.AuxInt = 64 - c 1043 v.AddArg(x) 1044 return true 1045 } 1046 // match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 1047 // cond: c < 32 && t.Size() == 4 1048 // result: (RORWconst [32-c] x) 1049 for { 1050 t := v.Type 1051 c := v.AuxInt 1052 _ = v.Args[1] 1053 v_0 := v.Args[0] 1054 if v_0.Op != OpARM64SRLconst { 1055 break 1056 } 1057 if v_0.AuxInt != 32-c { 1058 break 1059 } 1060 v_0_0 := v_0.Args[0] 1061 if v_0_0.Op != OpARM64MOVWUreg { 1062 break 1063 } 1064 x := v_0_0.Args[0] 1065 if x != v.Args[1] { 
1066 break 1067 } 1068 if !(c < 32 && t.Size() == 4) { 1069 break 1070 } 1071 v.reset(OpARM64RORWconst) 1072 v.AuxInt = 32 - c 1073 v.AddArg(x) 1074 return true 1075 } 1076 return false 1077 } 1078 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 1079 b := v.Block 1080 _ = b 1081 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1082 // cond: 1083 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1084 for { 1085 d := v.AuxInt 1086 _ = v.Args[1] 1087 v_0 := v.Args[0] 1088 if v_0.Op != OpARM64MOVDconst { 1089 break 1090 } 1091 c := v_0.AuxInt 1092 x := v.Args[1] 1093 v.reset(OpARM64ADDconst) 1094 v.AuxInt = c 1095 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1096 v0.AuxInt = d 1097 v0.AddArg(x) 1098 v.AddArg(v0) 1099 return true 1100 } 1101 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1102 // cond: 1103 // result: (ADDconst x [int64(int64(c)>>uint64(d))]) 1104 for { 1105 d := v.AuxInt 1106 _ = v.Args[1] 1107 x := v.Args[0] 1108 v_1 := v.Args[1] 1109 if v_1.Op != OpARM64MOVDconst { 1110 break 1111 } 1112 c := v_1.AuxInt 1113 v.reset(OpARM64ADDconst) 1114 v.AuxInt = int64(int64(c) >> uint64(d)) 1115 v.AddArg(x) 1116 return true 1117 } 1118 return false 1119 } 1120 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 1121 b := v.Block 1122 _ = b 1123 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1124 // cond: 1125 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1126 for { 1127 d := v.AuxInt 1128 _ = v.Args[1] 1129 v_0 := v.Args[0] 1130 if v_0.Op != OpARM64MOVDconst { 1131 break 1132 } 1133 c := v_0.AuxInt 1134 x := v.Args[1] 1135 v.reset(OpARM64ADDconst) 1136 v.AuxInt = c 1137 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1138 v0.AuxInt = d 1139 v0.AddArg(x) 1140 v.AddArg(v0) 1141 return true 1142 } 1143 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1144 // cond: 1145 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1146 for { 1147 d := v.AuxInt 1148 _ = v.Args[1] 1149 x := v.Args[0] 1150 v_1 := v.Args[1] 1151 if v_1.Op != OpARM64MOVDconst 
{ 1152 break 1153 } 1154 c := v_1.AuxInt 1155 v.reset(OpARM64ADDconst) 1156 v.AuxInt = int64(uint64(c) >> uint64(d)) 1157 v.AddArg(x) 1158 return true 1159 } 1160 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 1161 // cond: 1162 // result: (RORconst [ c] x) 1163 for { 1164 c := v.AuxInt 1165 _ = v.Args[1] 1166 v_0 := v.Args[0] 1167 if v_0.Op != OpARM64SLLconst { 1168 break 1169 } 1170 if v_0.AuxInt != 64-c { 1171 break 1172 } 1173 x := v_0.Args[0] 1174 if x != v.Args[1] { 1175 break 1176 } 1177 v.reset(OpARM64RORconst) 1178 v.AuxInt = c 1179 v.AddArg(x) 1180 return true 1181 } 1182 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 1183 // cond: c < 32 && t.Size() == 4 1184 // result: (RORWconst [ c] x) 1185 for { 1186 t := v.Type 1187 c := v.AuxInt 1188 _ = v.Args[1] 1189 v_0 := v.Args[0] 1190 if v_0.Op != OpARM64SLLconst { 1191 break 1192 } 1193 if v_0.AuxInt != 32-c { 1194 break 1195 } 1196 x := v_0.Args[0] 1197 v_1 := v.Args[1] 1198 if v_1.Op != OpARM64MOVWUreg { 1199 break 1200 } 1201 if x != v_1.Args[0] { 1202 break 1203 } 1204 if !(c < 32 && t.Size() == 4) { 1205 break 1206 } 1207 v.reset(OpARM64RORWconst) 1208 v.AuxInt = c 1209 v.AddArg(x) 1210 return true 1211 } 1212 return false 1213 } 1214 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 1215 // match: (AND x (MOVDconst [c])) 1216 // cond: 1217 // result: (ANDconst [c] x) 1218 for { 1219 _ = v.Args[1] 1220 x := v.Args[0] 1221 v_1 := v.Args[1] 1222 if v_1.Op != OpARM64MOVDconst { 1223 break 1224 } 1225 c := v_1.AuxInt 1226 v.reset(OpARM64ANDconst) 1227 v.AuxInt = c 1228 v.AddArg(x) 1229 return true 1230 } 1231 // match: (AND (MOVDconst [c]) x) 1232 // cond: 1233 // result: (ANDconst [c] x) 1234 for { 1235 _ = v.Args[1] 1236 v_0 := v.Args[0] 1237 if v_0.Op != OpARM64MOVDconst { 1238 break 1239 } 1240 c := v_0.AuxInt 1241 x := v.Args[1] 1242 v.reset(OpARM64ANDconst) 1243 v.AuxInt = c 1244 v.AddArg(x) 1245 return true 1246 } 1247 // match: (AND x x) 1248 // cond: 1249 // result: x 1250 for 
{ 1251 _ = v.Args[1] 1252 x := v.Args[0] 1253 if x != v.Args[1] { 1254 break 1255 } 1256 v.reset(OpCopy) 1257 v.Type = x.Type 1258 v.AddArg(x) 1259 return true 1260 } 1261 // match: (AND x (MVN y)) 1262 // cond: 1263 // result: (BIC x y) 1264 for { 1265 _ = v.Args[1] 1266 x := v.Args[0] 1267 v_1 := v.Args[1] 1268 if v_1.Op != OpARM64MVN { 1269 break 1270 } 1271 y := v_1.Args[0] 1272 v.reset(OpARM64BIC) 1273 v.AddArg(x) 1274 v.AddArg(y) 1275 return true 1276 } 1277 // match: (AND (MVN y) x) 1278 // cond: 1279 // result: (BIC x y) 1280 for { 1281 _ = v.Args[1] 1282 v_0 := v.Args[0] 1283 if v_0.Op != OpARM64MVN { 1284 break 1285 } 1286 y := v_0.Args[0] 1287 x := v.Args[1] 1288 v.reset(OpARM64BIC) 1289 v.AddArg(x) 1290 v.AddArg(y) 1291 return true 1292 } 1293 // match: (AND x (SLLconst [c] y)) 1294 // cond: 1295 // result: (ANDshiftLL x y [c]) 1296 for { 1297 _ = v.Args[1] 1298 x := v.Args[0] 1299 v_1 := v.Args[1] 1300 if v_1.Op != OpARM64SLLconst { 1301 break 1302 } 1303 c := v_1.AuxInt 1304 y := v_1.Args[0] 1305 v.reset(OpARM64ANDshiftLL) 1306 v.AuxInt = c 1307 v.AddArg(x) 1308 v.AddArg(y) 1309 return true 1310 } 1311 // match: (AND (SLLconst [c] y) x) 1312 // cond: 1313 // result: (ANDshiftLL x y [c]) 1314 for { 1315 _ = v.Args[1] 1316 v_0 := v.Args[0] 1317 if v_0.Op != OpARM64SLLconst { 1318 break 1319 } 1320 c := v_0.AuxInt 1321 y := v_0.Args[0] 1322 x := v.Args[1] 1323 v.reset(OpARM64ANDshiftLL) 1324 v.AuxInt = c 1325 v.AddArg(x) 1326 v.AddArg(y) 1327 return true 1328 } 1329 // match: (AND x (SRLconst [c] y)) 1330 // cond: 1331 // result: (ANDshiftRL x y [c]) 1332 for { 1333 _ = v.Args[1] 1334 x := v.Args[0] 1335 v_1 := v.Args[1] 1336 if v_1.Op != OpARM64SRLconst { 1337 break 1338 } 1339 c := v_1.AuxInt 1340 y := v_1.Args[0] 1341 v.reset(OpARM64ANDshiftRL) 1342 v.AuxInt = c 1343 v.AddArg(x) 1344 v.AddArg(y) 1345 return true 1346 } 1347 // match: (AND (SRLconst [c] y) x) 1348 // cond: 1349 // result: (ANDshiftRL x y [c]) 1350 for { 1351 _ = v.Args[1] 1352 v_0 := 
v.Args[0] 1353 if v_0.Op != OpARM64SRLconst { 1354 break 1355 } 1356 c := v_0.AuxInt 1357 y := v_0.Args[0] 1358 x := v.Args[1] 1359 v.reset(OpARM64ANDshiftRL) 1360 v.AuxInt = c 1361 v.AddArg(x) 1362 v.AddArg(y) 1363 return true 1364 } 1365 // match: (AND x (SRAconst [c] y)) 1366 // cond: 1367 // result: (ANDshiftRA x y [c]) 1368 for { 1369 _ = v.Args[1] 1370 x := v.Args[0] 1371 v_1 := v.Args[1] 1372 if v_1.Op != OpARM64SRAconst { 1373 break 1374 } 1375 c := v_1.AuxInt 1376 y := v_1.Args[0] 1377 v.reset(OpARM64ANDshiftRA) 1378 v.AuxInt = c 1379 v.AddArg(x) 1380 v.AddArg(y) 1381 return true 1382 } 1383 return false 1384 } 1385 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 1386 // match: (AND (SRAconst [c] y) x) 1387 // cond: 1388 // result: (ANDshiftRA x y [c]) 1389 for { 1390 _ = v.Args[1] 1391 v_0 := v.Args[0] 1392 if v_0.Op != OpARM64SRAconst { 1393 break 1394 } 1395 c := v_0.AuxInt 1396 y := v_0.Args[0] 1397 x := v.Args[1] 1398 v.reset(OpARM64ANDshiftRA) 1399 v.AuxInt = c 1400 v.AddArg(x) 1401 v.AddArg(y) 1402 return true 1403 } 1404 return false 1405 } 1406 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 1407 // match: (ANDconst [0] _) 1408 // cond: 1409 // result: (MOVDconst [0]) 1410 for { 1411 if v.AuxInt != 0 { 1412 break 1413 } 1414 v.reset(OpARM64MOVDconst) 1415 v.AuxInt = 0 1416 return true 1417 } 1418 // match: (ANDconst [-1] x) 1419 // cond: 1420 // result: x 1421 for { 1422 if v.AuxInt != -1 { 1423 break 1424 } 1425 x := v.Args[0] 1426 v.reset(OpCopy) 1427 v.Type = x.Type 1428 v.AddArg(x) 1429 return true 1430 } 1431 // match: (ANDconst [c] (MOVDconst [d])) 1432 // cond: 1433 // result: (MOVDconst [c&d]) 1434 for { 1435 c := v.AuxInt 1436 v_0 := v.Args[0] 1437 if v_0.Op != OpARM64MOVDconst { 1438 break 1439 } 1440 d := v_0.AuxInt 1441 v.reset(OpARM64MOVDconst) 1442 v.AuxInt = c & d 1443 return true 1444 } 1445 // match: (ANDconst [c] (ANDconst [d] x)) 1446 // cond: 1447 // result: (ANDconst [c&d] x) 1448 for { 1449 c := v.AuxInt 1450 
v_0 := v.Args[0] 1451 if v_0.Op != OpARM64ANDconst { 1452 break 1453 } 1454 d := v_0.AuxInt 1455 x := v_0.Args[0] 1456 v.reset(OpARM64ANDconst) 1457 v.AuxInt = c & d 1458 v.AddArg(x) 1459 return true 1460 } 1461 return false 1462 } 1463 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 1464 b := v.Block 1465 _ = b 1466 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 1467 // cond: 1468 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 1469 for { 1470 d := v.AuxInt 1471 _ = v.Args[1] 1472 v_0 := v.Args[0] 1473 if v_0.Op != OpARM64MOVDconst { 1474 break 1475 } 1476 c := v_0.AuxInt 1477 x := v.Args[1] 1478 v.reset(OpARM64ANDconst) 1479 v.AuxInt = c 1480 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1481 v0.AuxInt = d 1482 v0.AddArg(x) 1483 v.AddArg(v0) 1484 return true 1485 } 1486 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 1487 // cond: 1488 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 1489 for { 1490 d := v.AuxInt 1491 _ = v.Args[1] 1492 x := v.Args[0] 1493 v_1 := v.Args[1] 1494 if v_1.Op != OpARM64MOVDconst { 1495 break 1496 } 1497 c := v_1.AuxInt 1498 v.reset(OpARM64ANDconst) 1499 v.AuxInt = int64(uint64(c) << uint64(d)) 1500 v.AddArg(x) 1501 return true 1502 } 1503 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 1504 // cond: c==d 1505 // result: y 1506 for { 1507 d := v.AuxInt 1508 _ = v.Args[1] 1509 x := v.Args[0] 1510 y := v.Args[1] 1511 if y.Op != OpARM64SLLconst { 1512 break 1513 } 1514 c := y.AuxInt 1515 if x != y.Args[0] { 1516 break 1517 } 1518 if !(c == d) { 1519 break 1520 } 1521 v.reset(OpCopy) 1522 v.Type = y.Type 1523 v.AddArg(y) 1524 return true 1525 } 1526 return false 1527 } 1528 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool { 1529 b := v.Block 1530 _ = b 1531 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 1532 // cond: 1533 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 1534 for { 1535 d := v.AuxInt 1536 _ = v.Args[1] 1537 v_0 := v.Args[0] 1538 if v_0.Op != OpARM64MOVDconst { 1539 break 1540 } 1541 c := 
v_0.AuxInt 1542 x := v.Args[1] 1543 v.reset(OpARM64ANDconst) 1544 v.AuxInt = c 1545 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1546 v0.AuxInt = d 1547 v0.AddArg(x) 1548 v.AddArg(v0) 1549 return true 1550 } 1551 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 1552 // cond: 1553 // result: (ANDconst x [int64(int64(c)>>uint64(d))]) 1554 for { 1555 d := v.AuxInt 1556 _ = v.Args[1] 1557 x := v.Args[0] 1558 v_1 := v.Args[1] 1559 if v_1.Op != OpARM64MOVDconst { 1560 break 1561 } 1562 c := v_1.AuxInt 1563 v.reset(OpARM64ANDconst) 1564 v.AuxInt = int64(int64(c) >> uint64(d)) 1565 v.AddArg(x) 1566 return true 1567 } 1568 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 1569 // cond: c==d 1570 // result: y 1571 for { 1572 d := v.AuxInt 1573 _ = v.Args[1] 1574 x := v.Args[0] 1575 y := v.Args[1] 1576 if y.Op != OpARM64SRAconst { 1577 break 1578 } 1579 c := y.AuxInt 1580 if x != y.Args[0] { 1581 break 1582 } 1583 if !(c == d) { 1584 break 1585 } 1586 v.reset(OpCopy) 1587 v.Type = y.Type 1588 v.AddArg(y) 1589 return true 1590 } 1591 return false 1592 } 1593 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool { 1594 b := v.Block 1595 _ = b 1596 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 1597 // cond: 1598 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 1599 for { 1600 d := v.AuxInt 1601 _ = v.Args[1] 1602 v_0 := v.Args[0] 1603 if v_0.Op != OpARM64MOVDconst { 1604 break 1605 } 1606 c := v_0.AuxInt 1607 x := v.Args[1] 1608 v.reset(OpARM64ANDconst) 1609 v.AuxInt = c 1610 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1611 v0.AuxInt = d 1612 v0.AddArg(x) 1613 v.AddArg(v0) 1614 return true 1615 } 1616 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 1617 // cond: 1618 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 1619 for { 1620 d := v.AuxInt 1621 _ = v.Args[1] 1622 x := v.Args[0] 1623 v_1 := v.Args[1] 1624 if v_1.Op != OpARM64MOVDconst { 1625 break 1626 } 1627 c := v_1.AuxInt 1628 v.reset(OpARM64ANDconst) 1629 v.AuxInt = int64(uint64(c) >> uint64(d)) 1630 
v.AddArg(x) 1631 return true 1632 } 1633 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 1634 // cond: c==d 1635 // result: y 1636 for { 1637 d := v.AuxInt 1638 _ = v.Args[1] 1639 x := v.Args[0] 1640 y := v.Args[1] 1641 if y.Op != OpARM64SRLconst { 1642 break 1643 } 1644 c := y.AuxInt 1645 if x != y.Args[0] { 1646 break 1647 } 1648 if !(c == d) { 1649 break 1650 } 1651 v.reset(OpCopy) 1652 v.Type = y.Type 1653 v.AddArg(y) 1654 return true 1655 } 1656 return false 1657 } 1658 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 1659 // match: (BIC x (MOVDconst [c])) 1660 // cond: 1661 // result: (BICconst [c] x) 1662 for { 1663 _ = v.Args[1] 1664 x := v.Args[0] 1665 v_1 := v.Args[1] 1666 if v_1.Op != OpARM64MOVDconst { 1667 break 1668 } 1669 c := v_1.AuxInt 1670 v.reset(OpARM64BICconst) 1671 v.AuxInt = c 1672 v.AddArg(x) 1673 return true 1674 } 1675 // match: (BIC x x) 1676 // cond: 1677 // result: (MOVDconst [0]) 1678 for { 1679 _ = v.Args[1] 1680 x := v.Args[0] 1681 if x != v.Args[1] { 1682 break 1683 } 1684 v.reset(OpARM64MOVDconst) 1685 v.AuxInt = 0 1686 return true 1687 } 1688 // match: (BIC x (SLLconst [c] y)) 1689 // cond: 1690 // result: (BICshiftLL x y [c]) 1691 for { 1692 _ = v.Args[1] 1693 x := v.Args[0] 1694 v_1 := v.Args[1] 1695 if v_1.Op != OpARM64SLLconst { 1696 break 1697 } 1698 c := v_1.AuxInt 1699 y := v_1.Args[0] 1700 v.reset(OpARM64BICshiftLL) 1701 v.AuxInt = c 1702 v.AddArg(x) 1703 v.AddArg(y) 1704 return true 1705 } 1706 // match: (BIC x (SRLconst [c] y)) 1707 // cond: 1708 // result: (BICshiftRL x y [c]) 1709 for { 1710 _ = v.Args[1] 1711 x := v.Args[0] 1712 v_1 := v.Args[1] 1713 if v_1.Op != OpARM64SRLconst { 1714 break 1715 } 1716 c := v_1.AuxInt 1717 y := v_1.Args[0] 1718 v.reset(OpARM64BICshiftRL) 1719 v.AuxInt = c 1720 v.AddArg(x) 1721 v.AddArg(y) 1722 return true 1723 } 1724 // match: (BIC x (SRAconst [c] y)) 1725 // cond: 1726 // result: (BICshiftRA x y [c]) 1727 for { 1728 _ = v.Args[1] 1729 x := v.Args[0] 1730 v_1 := v.Args[1] 1731 if 
v_1.Op != OpARM64SRAconst { 1732 break 1733 } 1734 c := v_1.AuxInt 1735 y := v_1.Args[0] 1736 v.reset(OpARM64BICshiftRA) 1737 v.AuxInt = c 1738 v.AddArg(x) 1739 v.AddArg(y) 1740 return true 1741 } 1742 return false 1743 } 1744 func rewriteValueARM64_OpARM64BICconst_0(v *Value) bool { 1745 // match: (BICconst [0] x) 1746 // cond: 1747 // result: x 1748 for { 1749 if v.AuxInt != 0 { 1750 break 1751 } 1752 x := v.Args[0] 1753 v.reset(OpCopy) 1754 v.Type = x.Type 1755 v.AddArg(x) 1756 return true 1757 } 1758 // match: (BICconst [-1] _) 1759 // cond: 1760 // result: (MOVDconst [0]) 1761 for { 1762 if v.AuxInt != -1 { 1763 break 1764 } 1765 v.reset(OpARM64MOVDconst) 1766 v.AuxInt = 0 1767 return true 1768 } 1769 // match: (BICconst [c] (MOVDconst [d])) 1770 // cond: 1771 // result: (MOVDconst [d&^c]) 1772 for { 1773 c := v.AuxInt 1774 v_0 := v.Args[0] 1775 if v_0.Op != OpARM64MOVDconst { 1776 break 1777 } 1778 d := v_0.AuxInt 1779 v.reset(OpARM64MOVDconst) 1780 v.AuxInt = d &^ c 1781 return true 1782 } 1783 return false 1784 } 1785 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 1786 // match: (BICshiftLL x (MOVDconst [c]) [d]) 1787 // cond: 1788 // result: (BICconst x [int64(uint64(c)<<uint64(d))]) 1789 for { 1790 d := v.AuxInt 1791 _ = v.Args[1] 1792 x := v.Args[0] 1793 v_1 := v.Args[1] 1794 if v_1.Op != OpARM64MOVDconst { 1795 break 1796 } 1797 c := v_1.AuxInt 1798 v.reset(OpARM64BICconst) 1799 v.AuxInt = int64(uint64(c) << uint64(d)) 1800 v.AddArg(x) 1801 return true 1802 } 1803 // match: (BICshiftLL x (SLLconst x [c]) [d]) 1804 // cond: c==d 1805 // result: (MOVDconst [0]) 1806 for { 1807 d := v.AuxInt 1808 _ = v.Args[1] 1809 x := v.Args[0] 1810 v_1 := v.Args[1] 1811 if v_1.Op != OpARM64SLLconst { 1812 break 1813 } 1814 c := v_1.AuxInt 1815 if x != v_1.Args[0] { 1816 break 1817 } 1818 if !(c == d) { 1819 break 1820 } 1821 v.reset(OpARM64MOVDconst) 1822 v.AuxInt = 0 1823 return true 1824 } 1825 return false 1826 } 1827 func 
rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool { 1828 // match: (BICshiftRA x (MOVDconst [c]) [d]) 1829 // cond: 1830 // result: (BICconst x [int64(int64(c)>>uint64(d))]) 1831 for { 1832 d := v.AuxInt 1833 _ = v.Args[1] 1834 x := v.Args[0] 1835 v_1 := v.Args[1] 1836 if v_1.Op != OpARM64MOVDconst { 1837 break 1838 } 1839 c := v_1.AuxInt 1840 v.reset(OpARM64BICconst) 1841 v.AuxInt = int64(int64(c) >> uint64(d)) 1842 v.AddArg(x) 1843 return true 1844 } 1845 // match: (BICshiftRA x (SRAconst x [c]) [d]) 1846 // cond: c==d 1847 // result: (MOVDconst [0]) 1848 for { 1849 d := v.AuxInt 1850 _ = v.Args[1] 1851 x := v.Args[0] 1852 v_1 := v.Args[1] 1853 if v_1.Op != OpARM64SRAconst { 1854 break 1855 } 1856 c := v_1.AuxInt 1857 if x != v_1.Args[0] { 1858 break 1859 } 1860 if !(c == d) { 1861 break 1862 } 1863 v.reset(OpARM64MOVDconst) 1864 v.AuxInt = 0 1865 return true 1866 } 1867 return false 1868 } 1869 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool { 1870 // match: (BICshiftRL x (MOVDconst [c]) [d]) 1871 // cond: 1872 // result: (BICconst x [int64(uint64(c)>>uint64(d))]) 1873 for { 1874 d := v.AuxInt 1875 _ = v.Args[1] 1876 x := v.Args[0] 1877 v_1 := v.Args[1] 1878 if v_1.Op != OpARM64MOVDconst { 1879 break 1880 } 1881 c := v_1.AuxInt 1882 v.reset(OpARM64BICconst) 1883 v.AuxInt = int64(uint64(c) >> uint64(d)) 1884 v.AddArg(x) 1885 return true 1886 } 1887 // match: (BICshiftRL x (SRLconst x [c]) [d]) 1888 // cond: c==d 1889 // result: (MOVDconst [0]) 1890 for { 1891 d := v.AuxInt 1892 _ = v.Args[1] 1893 x := v.Args[0] 1894 v_1 := v.Args[1] 1895 if v_1.Op != OpARM64SRLconst { 1896 break 1897 } 1898 c := v_1.AuxInt 1899 if x != v_1.Args[0] { 1900 break 1901 } 1902 if !(c == d) { 1903 break 1904 } 1905 v.reset(OpARM64MOVDconst) 1906 v.AuxInt = 0 1907 return true 1908 } 1909 return false 1910 } 1911 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 1912 b := v.Block 1913 _ = b 1914 // match: (CMP x (MOVDconst [c])) 1915 // cond: 1916 // result: (CMPconst [c] 
x) 1917 for { 1918 _ = v.Args[1] 1919 x := v.Args[0] 1920 v_1 := v.Args[1] 1921 if v_1.Op != OpARM64MOVDconst { 1922 break 1923 } 1924 c := v_1.AuxInt 1925 v.reset(OpARM64CMPconst) 1926 v.AuxInt = c 1927 v.AddArg(x) 1928 return true 1929 } 1930 // match: (CMP (MOVDconst [c]) x) 1931 // cond: 1932 // result: (InvertFlags (CMPconst [c] x)) 1933 for { 1934 _ = v.Args[1] 1935 v_0 := v.Args[0] 1936 if v_0.Op != OpARM64MOVDconst { 1937 break 1938 } 1939 c := v_0.AuxInt 1940 x := v.Args[1] 1941 v.reset(OpARM64InvertFlags) 1942 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 1943 v0.AuxInt = c 1944 v0.AddArg(x) 1945 v.AddArg(v0) 1946 return true 1947 } 1948 // match: (CMP x (SLLconst [c] y)) 1949 // cond: 1950 // result: (CMPshiftLL x y [c]) 1951 for { 1952 _ = v.Args[1] 1953 x := v.Args[0] 1954 v_1 := v.Args[1] 1955 if v_1.Op != OpARM64SLLconst { 1956 break 1957 } 1958 c := v_1.AuxInt 1959 y := v_1.Args[0] 1960 v.reset(OpARM64CMPshiftLL) 1961 v.AuxInt = c 1962 v.AddArg(x) 1963 v.AddArg(y) 1964 return true 1965 } 1966 // match: (CMP (SLLconst [c] y) x) 1967 // cond: 1968 // result: (InvertFlags (CMPshiftLL x y [c])) 1969 for { 1970 _ = v.Args[1] 1971 v_0 := v.Args[0] 1972 if v_0.Op != OpARM64SLLconst { 1973 break 1974 } 1975 c := v_0.AuxInt 1976 y := v_0.Args[0] 1977 x := v.Args[1] 1978 v.reset(OpARM64InvertFlags) 1979 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 1980 v0.AuxInt = c 1981 v0.AddArg(x) 1982 v0.AddArg(y) 1983 v.AddArg(v0) 1984 return true 1985 } 1986 // match: (CMP x (SRLconst [c] y)) 1987 // cond: 1988 // result: (CMPshiftRL x y [c]) 1989 for { 1990 _ = v.Args[1] 1991 x := v.Args[0] 1992 v_1 := v.Args[1] 1993 if v_1.Op != OpARM64SRLconst { 1994 break 1995 } 1996 c := v_1.AuxInt 1997 y := v_1.Args[0] 1998 v.reset(OpARM64CMPshiftRL) 1999 v.AuxInt = c 2000 v.AddArg(x) 2001 v.AddArg(y) 2002 return true 2003 } 2004 // match: (CMP (SRLconst [c] y) x) 2005 // cond: 2006 // result: (InvertFlags (CMPshiftRL x y [c])) 2007 for { 2008 _ = 
v.Args[1] 2009 v_0 := v.Args[0] 2010 if v_0.Op != OpARM64SRLconst { 2011 break 2012 } 2013 c := v_0.AuxInt 2014 y := v_0.Args[0] 2015 x := v.Args[1] 2016 v.reset(OpARM64InvertFlags) 2017 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 2018 v0.AuxInt = c 2019 v0.AddArg(x) 2020 v0.AddArg(y) 2021 v.AddArg(v0) 2022 return true 2023 } 2024 // match: (CMP x (SRAconst [c] y)) 2025 // cond: 2026 // result: (CMPshiftRA x y [c]) 2027 for { 2028 _ = v.Args[1] 2029 x := v.Args[0] 2030 v_1 := v.Args[1] 2031 if v_1.Op != OpARM64SRAconst { 2032 break 2033 } 2034 c := v_1.AuxInt 2035 y := v_1.Args[0] 2036 v.reset(OpARM64CMPshiftRA) 2037 v.AuxInt = c 2038 v.AddArg(x) 2039 v.AddArg(y) 2040 return true 2041 } 2042 // match: (CMP (SRAconst [c] y) x) 2043 // cond: 2044 // result: (InvertFlags (CMPshiftRA x y [c])) 2045 for { 2046 _ = v.Args[1] 2047 v_0 := v.Args[0] 2048 if v_0.Op != OpARM64SRAconst { 2049 break 2050 } 2051 c := v_0.AuxInt 2052 y := v_0.Args[0] 2053 x := v.Args[1] 2054 v.reset(OpARM64InvertFlags) 2055 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 2056 v0.AuxInt = c 2057 v0.AddArg(x) 2058 v0.AddArg(y) 2059 v.AddArg(v0) 2060 return true 2061 } 2062 return false 2063 } 2064 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool { 2065 b := v.Block 2066 _ = b 2067 // match: (CMPW x (MOVDconst [c])) 2068 // cond: 2069 // result: (CMPWconst [int64(int32(c))] x) 2070 for { 2071 _ = v.Args[1] 2072 x := v.Args[0] 2073 v_1 := v.Args[1] 2074 if v_1.Op != OpARM64MOVDconst { 2075 break 2076 } 2077 c := v_1.AuxInt 2078 v.reset(OpARM64CMPWconst) 2079 v.AuxInt = int64(int32(c)) 2080 v.AddArg(x) 2081 return true 2082 } 2083 // match: (CMPW (MOVDconst [c]) x) 2084 // cond: 2085 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 2086 for { 2087 _ = v.Args[1] 2088 v_0 := v.Args[0] 2089 if v_0.Op != OpARM64MOVDconst { 2090 break 2091 } 2092 c := v_0.AuxInt 2093 x := v.Args[1] 2094 v.reset(OpARM64InvertFlags) 2095 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, 
types.TypeFlags) 2096 v0.AuxInt = int64(int32(c)) 2097 v0.AddArg(x) 2098 v.AddArg(v0) 2099 return true 2100 } 2101 return false 2102 } 2103 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool { 2104 // match: (CMPWconst (MOVDconst [x]) [y]) 2105 // cond: int32(x)==int32(y) 2106 // result: (FlagEQ) 2107 for { 2108 y := v.AuxInt 2109 v_0 := v.Args[0] 2110 if v_0.Op != OpARM64MOVDconst { 2111 break 2112 } 2113 x := v_0.AuxInt 2114 if !(int32(x) == int32(y)) { 2115 break 2116 } 2117 v.reset(OpARM64FlagEQ) 2118 return true 2119 } 2120 // match: (CMPWconst (MOVDconst [x]) [y]) 2121 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 2122 // result: (FlagLT_ULT) 2123 for { 2124 y := v.AuxInt 2125 v_0 := v.Args[0] 2126 if v_0.Op != OpARM64MOVDconst { 2127 break 2128 } 2129 x := v_0.AuxInt 2130 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 2131 break 2132 } 2133 v.reset(OpARM64FlagLT_ULT) 2134 return true 2135 } 2136 // match: (CMPWconst (MOVDconst [x]) [y]) 2137 // cond: int32(x)<int32(y) && uint32(x)>uint32(y) 2138 // result: (FlagLT_UGT) 2139 for { 2140 y := v.AuxInt 2141 v_0 := v.Args[0] 2142 if v_0.Op != OpARM64MOVDconst { 2143 break 2144 } 2145 x := v_0.AuxInt 2146 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 2147 break 2148 } 2149 v.reset(OpARM64FlagLT_UGT) 2150 return true 2151 } 2152 // match: (CMPWconst (MOVDconst [x]) [y]) 2153 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 2154 // result: (FlagGT_ULT) 2155 for { 2156 y := v.AuxInt 2157 v_0 := v.Args[0] 2158 if v_0.Op != OpARM64MOVDconst { 2159 break 2160 } 2161 x := v_0.AuxInt 2162 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { 2163 break 2164 } 2165 v.reset(OpARM64FlagGT_ULT) 2166 return true 2167 } 2168 // match: (CMPWconst (MOVDconst [x]) [y]) 2169 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 2170 // result: (FlagGT_UGT) 2171 for { 2172 y := v.AuxInt 2173 v_0 := v.Args[0] 2174 if v_0.Op != OpARM64MOVDconst { 2175 break 2176 } 2177 x := v_0.AuxInt 2178 if !(int32(x) > int32(y) 
&& uint32(x) > uint32(y)) { 2179 break 2180 } 2181 v.reset(OpARM64FlagGT_UGT) 2182 return true 2183 } 2184 // match: (CMPWconst (MOVBUreg _) [c]) 2185 // cond: 0xff < int32(c) 2186 // result: (FlagLT_ULT) 2187 for { 2188 c := v.AuxInt 2189 v_0 := v.Args[0] 2190 if v_0.Op != OpARM64MOVBUreg { 2191 break 2192 } 2193 if !(0xff < int32(c)) { 2194 break 2195 } 2196 v.reset(OpARM64FlagLT_ULT) 2197 return true 2198 } 2199 // match: (CMPWconst (MOVHUreg _) [c]) 2200 // cond: 0xffff < int32(c) 2201 // result: (FlagLT_ULT) 2202 for { 2203 c := v.AuxInt 2204 v_0 := v.Args[0] 2205 if v_0.Op != OpARM64MOVHUreg { 2206 break 2207 } 2208 if !(0xffff < int32(c)) { 2209 break 2210 } 2211 v.reset(OpARM64FlagLT_ULT) 2212 return true 2213 } 2214 return false 2215 } 2216 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool { 2217 // match: (CMPconst (MOVDconst [x]) [y]) 2218 // cond: x==y 2219 // result: (FlagEQ) 2220 for { 2221 y := v.AuxInt 2222 v_0 := v.Args[0] 2223 if v_0.Op != OpARM64MOVDconst { 2224 break 2225 } 2226 x := v_0.AuxInt 2227 if !(x == y) { 2228 break 2229 } 2230 v.reset(OpARM64FlagEQ) 2231 return true 2232 } 2233 // match: (CMPconst (MOVDconst [x]) [y]) 2234 // cond: int64(x)<int64(y) && uint64(x)<uint64(y) 2235 // result: (FlagLT_ULT) 2236 for { 2237 y := v.AuxInt 2238 v_0 := v.Args[0] 2239 if v_0.Op != OpARM64MOVDconst { 2240 break 2241 } 2242 x := v_0.AuxInt 2243 if !(int64(x) < int64(y) && uint64(x) < uint64(y)) { 2244 break 2245 } 2246 v.reset(OpARM64FlagLT_ULT) 2247 return true 2248 } 2249 // match: (CMPconst (MOVDconst [x]) [y]) 2250 // cond: int64(x)<int64(y) && uint64(x)>uint64(y) 2251 // result: (FlagLT_UGT) 2252 for { 2253 y := v.AuxInt 2254 v_0 := v.Args[0] 2255 if v_0.Op != OpARM64MOVDconst { 2256 break 2257 } 2258 x := v_0.AuxInt 2259 if !(int64(x) < int64(y) && uint64(x) > uint64(y)) { 2260 break 2261 } 2262 v.reset(OpARM64FlagLT_UGT) 2263 return true 2264 } 2265 // match: (CMPconst (MOVDconst [x]) [y]) 2266 // cond: int64(x)>int64(y) && 
uint64(x)<uint64(y) 2267 // result: (FlagGT_ULT) 2268 for { 2269 y := v.AuxInt 2270 v_0 := v.Args[0] 2271 if v_0.Op != OpARM64MOVDconst { 2272 break 2273 } 2274 x := v_0.AuxInt 2275 if !(int64(x) > int64(y) && uint64(x) < uint64(y)) { 2276 break 2277 } 2278 v.reset(OpARM64FlagGT_ULT) 2279 return true 2280 } 2281 // match: (CMPconst (MOVDconst [x]) [y]) 2282 // cond: int64(x)>int64(y) && uint64(x)>uint64(y) 2283 // result: (FlagGT_UGT) 2284 for { 2285 y := v.AuxInt 2286 v_0 := v.Args[0] 2287 if v_0.Op != OpARM64MOVDconst { 2288 break 2289 } 2290 x := v_0.AuxInt 2291 if !(int64(x) > int64(y) && uint64(x) > uint64(y)) { 2292 break 2293 } 2294 v.reset(OpARM64FlagGT_UGT) 2295 return true 2296 } 2297 // match: (CMPconst (MOVBUreg _) [c]) 2298 // cond: 0xff < c 2299 // result: (FlagLT_ULT) 2300 for { 2301 c := v.AuxInt 2302 v_0 := v.Args[0] 2303 if v_0.Op != OpARM64MOVBUreg { 2304 break 2305 } 2306 if !(0xff < c) { 2307 break 2308 } 2309 v.reset(OpARM64FlagLT_ULT) 2310 return true 2311 } 2312 // match: (CMPconst (MOVHUreg _) [c]) 2313 // cond: 0xffff < c 2314 // result: (FlagLT_ULT) 2315 for { 2316 c := v.AuxInt 2317 v_0 := v.Args[0] 2318 if v_0.Op != OpARM64MOVHUreg { 2319 break 2320 } 2321 if !(0xffff < c) { 2322 break 2323 } 2324 v.reset(OpARM64FlagLT_ULT) 2325 return true 2326 } 2327 // match: (CMPconst (MOVWUreg _) [c]) 2328 // cond: 0xffffffff < c 2329 // result: (FlagLT_ULT) 2330 for { 2331 c := v.AuxInt 2332 v_0 := v.Args[0] 2333 if v_0.Op != OpARM64MOVWUreg { 2334 break 2335 } 2336 if !(0xffffffff < c) { 2337 break 2338 } 2339 v.reset(OpARM64FlagLT_ULT) 2340 return true 2341 } 2342 // match: (CMPconst (ANDconst _ [m]) [n]) 2343 // cond: 0 <= m && m < n 2344 // result: (FlagLT_ULT) 2345 for { 2346 n := v.AuxInt 2347 v_0 := v.Args[0] 2348 if v_0.Op != OpARM64ANDconst { 2349 break 2350 } 2351 m := v_0.AuxInt 2352 if !(0 <= m && m < n) { 2353 break 2354 } 2355 v.reset(OpARM64FlagLT_ULT) 2356 return true 2357 } 2358 // match: (CMPconst (SRLconst _ [c]) [n]) 2359 // 
cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 2360 // result: (FlagLT_ULT) 2361 for { 2362 n := v.AuxInt 2363 v_0 := v.Args[0] 2364 if v_0.Op != OpARM64SRLconst { 2365 break 2366 } 2367 c := v_0.AuxInt 2368 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 2369 break 2370 } 2371 v.reset(OpARM64FlagLT_ULT) 2372 return true 2373 } 2374 return false 2375 } 2376 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 2377 b := v.Block 2378 _ = b 2379 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 2380 // cond: 2381 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 2382 for { 2383 d := v.AuxInt 2384 _ = v.Args[1] 2385 v_0 := v.Args[0] 2386 if v_0.Op != OpARM64MOVDconst { 2387 break 2388 } 2389 c := v_0.AuxInt 2390 x := v.Args[1] 2391 v.reset(OpARM64InvertFlags) 2392 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2393 v0.AuxInt = c 2394 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2395 v1.AuxInt = d 2396 v1.AddArg(x) 2397 v0.AddArg(v1) 2398 v.AddArg(v0) 2399 return true 2400 } 2401 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 2402 // cond: 2403 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 2404 for { 2405 d := v.AuxInt 2406 _ = v.Args[1] 2407 x := v.Args[0] 2408 v_1 := v.Args[1] 2409 if v_1.Op != OpARM64MOVDconst { 2410 break 2411 } 2412 c := v_1.AuxInt 2413 v.reset(OpARM64CMPconst) 2414 v.AuxInt = int64(uint64(c) << uint64(d)) 2415 v.AddArg(x) 2416 return true 2417 } 2418 return false 2419 } 2420 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 2421 b := v.Block 2422 _ = b 2423 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 2424 // cond: 2425 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 2426 for { 2427 d := v.AuxInt 2428 _ = v.Args[1] 2429 v_0 := v.Args[0] 2430 if v_0.Op != OpARM64MOVDconst { 2431 break 2432 } 2433 c := v_0.AuxInt 2434 x := v.Args[1] 2435 v.reset(OpARM64InvertFlags) 2436 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2437 
v0.AuxInt = c 2438 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2439 v1.AuxInt = d 2440 v1.AddArg(x) 2441 v0.AddArg(v1) 2442 v.AddArg(v0) 2443 return true 2444 } 2445 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 2446 // cond: 2447 // result: (CMPconst x [int64(int64(c)>>uint64(d))]) 2448 for { 2449 d := v.AuxInt 2450 _ = v.Args[1] 2451 x := v.Args[0] 2452 v_1 := v.Args[1] 2453 if v_1.Op != OpARM64MOVDconst { 2454 break 2455 } 2456 c := v_1.AuxInt 2457 v.reset(OpARM64CMPconst) 2458 v.AuxInt = int64(int64(c) >> uint64(d)) 2459 v.AddArg(x) 2460 return true 2461 } 2462 return false 2463 } 2464 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 2465 b := v.Block 2466 _ = b 2467 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 2468 // cond: 2469 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 2470 for { 2471 d := v.AuxInt 2472 _ = v.Args[1] 2473 v_0 := v.Args[0] 2474 if v_0.Op != OpARM64MOVDconst { 2475 break 2476 } 2477 c := v_0.AuxInt 2478 x := v.Args[1] 2479 v.reset(OpARM64InvertFlags) 2480 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2481 v0.AuxInt = c 2482 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2483 v1.AuxInt = d 2484 v1.AddArg(x) 2485 v0.AddArg(v1) 2486 v.AddArg(v0) 2487 return true 2488 } 2489 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 2490 // cond: 2491 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 2492 for { 2493 d := v.AuxInt 2494 _ = v.Args[1] 2495 x := v.Args[0] 2496 v_1 := v.Args[1] 2497 if v_1.Op != OpARM64MOVDconst { 2498 break 2499 } 2500 c := v_1.AuxInt 2501 v.reset(OpARM64CMPconst) 2502 v.AuxInt = int64(uint64(c) >> uint64(d)) 2503 v.AddArg(x) 2504 return true 2505 } 2506 return false 2507 } 2508 func rewriteValueARM64_OpARM64CSELULT_0(v *Value) bool { 2509 // match: (CSELULT x (MOVDconst [0]) flag) 2510 // cond: 2511 // result: (CSELULT0 x flag) 2512 for { 2513 _ = v.Args[2] 2514 x := v.Args[0] 2515 v_1 := v.Args[1] 2516 if v_1.Op != OpARM64MOVDconst { 2517 break 2518 } 2519 if v_1.AuxInt 
!= 0 { 2520 break 2521 } 2522 flag := v.Args[2] 2523 v.reset(OpARM64CSELULT0) 2524 v.AddArg(x) 2525 v.AddArg(flag) 2526 return true 2527 } 2528 // match: (CSELULT _ y (FlagEQ)) 2529 // cond: 2530 // result: y 2531 for { 2532 _ = v.Args[2] 2533 y := v.Args[1] 2534 v_2 := v.Args[2] 2535 if v_2.Op != OpARM64FlagEQ { 2536 break 2537 } 2538 v.reset(OpCopy) 2539 v.Type = y.Type 2540 v.AddArg(y) 2541 return true 2542 } 2543 // match: (CSELULT x _ (FlagLT_ULT)) 2544 // cond: 2545 // result: x 2546 for { 2547 _ = v.Args[2] 2548 x := v.Args[0] 2549 v_2 := v.Args[2] 2550 if v_2.Op != OpARM64FlagLT_ULT { 2551 break 2552 } 2553 v.reset(OpCopy) 2554 v.Type = x.Type 2555 v.AddArg(x) 2556 return true 2557 } 2558 // match: (CSELULT _ y (FlagLT_UGT)) 2559 // cond: 2560 // result: y 2561 for { 2562 _ = v.Args[2] 2563 y := v.Args[1] 2564 v_2 := v.Args[2] 2565 if v_2.Op != OpARM64FlagLT_UGT { 2566 break 2567 } 2568 v.reset(OpCopy) 2569 v.Type = y.Type 2570 v.AddArg(y) 2571 return true 2572 } 2573 // match: (CSELULT x _ (FlagGT_ULT)) 2574 // cond: 2575 // result: x 2576 for { 2577 _ = v.Args[2] 2578 x := v.Args[0] 2579 v_2 := v.Args[2] 2580 if v_2.Op != OpARM64FlagGT_ULT { 2581 break 2582 } 2583 v.reset(OpCopy) 2584 v.Type = x.Type 2585 v.AddArg(x) 2586 return true 2587 } 2588 // match: (CSELULT _ y (FlagGT_UGT)) 2589 // cond: 2590 // result: y 2591 for { 2592 _ = v.Args[2] 2593 y := v.Args[1] 2594 v_2 := v.Args[2] 2595 if v_2.Op != OpARM64FlagGT_UGT { 2596 break 2597 } 2598 v.reset(OpCopy) 2599 v.Type = y.Type 2600 v.AddArg(y) 2601 return true 2602 } 2603 return false 2604 } 2605 func rewriteValueARM64_OpARM64CSELULT0_0(v *Value) bool { 2606 // match: (CSELULT0 _ (FlagEQ)) 2607 // cond: 2608 // result: (MOVDconst [0]) 2609 for { 2610 _ = v.Args[1] 2611 v_1 := v.Args[1] 2612 if v_1.Op != OpARM64FlagEQ { 2613 break 2614 } 2615 v.reset(OpARM64MOVDconst) 2616 v.AuxInt = 0 2617 return true 2618 } 2619 // match: (CSELULT0 x (FlagLT_ULT)) 2620 // cond: 2621 // result: x 2622 for { 2623 _ = 
v.Args[1] 2624 x := v.Args[0] 2625 v_1 := v.Args[1] 2626 if v_1.Op != OpARM64FlagLT_ULT { 2627 break 2628 } 2629 v.reset(OpCopy) 2630 v.Type = x.Type 2631 v.AddArg(x) 2632 return true 2633 } 2634 // match: (CSELULT0 _ (FlagLT_UGT)) 2635 // cond: 2636 // result: (MOVDconst [0]) 2637 for { 2638 _ = v.Args[1] 2639 v_1 := v.Args[1] 2640 if v_1.Op != OpARM64FlagLT_UGT { 2641 break 2642 } 2643 v.reset(OpARM64MOVDconst) 2644 v.AuxInt = 0 2645 return true 2646 } 2647 // match: (CSELULT0 x (FlagGT_ULT)) 2648 // cond: 2649 // result: x 2650 for { 2651 _ = v.Args[1] 2652 x := v.Args[0] 2653 v_1 := v.Args[1] 2654 if v_1.Op != OpARM64FlagGT_ULT { 2655 break 2656 } 2657 v.reset(OpCopy) 2658 v.Type = x.Type 2659 v.AddArg(x) 2660 return true 2661 } 2662 // match: (CSELULT0 _ (FlagGT_UGT)) 2663 // cond: 2664 // result: (MOVDconst [0]) 2665 for { 2666 _ = v.Args[1] 2667 v_1 := v.Args[1] 2668 if v_1.Op != OpARM64FlagGT_UGT { 2669 break 2670 } 2671 v.reset(OpARM64MOVDconst) 2672 v.AuxInt = 0 2673 return true 2674 } 2675 return false 2676 } 2677 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 2678 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 2679 // cond: 2680 // result: (MOVDconst [int64(c)/int64(d)]) 2681 for { 2682 _ = v.Args[1] 2683 v_0 := v.Args[0] 2684 if v_0.Op != OpARM64MOVDconst { 2685 break 2686 } 2687 c := v_0.AuxInt 2688 v_1 := v.Args[1] 2689 if v_1.Op != OpARM64MOVDconst { 2690 break 2691 } 2692 d := v_1.AuxInt 2693 v.reset(OpARM64MOVDconst) 2694 v.AuxInt = int64(c) / int64(d) 2695 return true 2696 } 2697 return false 2698 } 2699 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 2700 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 2701 // cond: 2702 // result: (MOVDconst [int64(int32(c)/int32(d))]) 2703 for { 2704 _ = v.Args[1] 2705 v_0 := v.Args[0] 2706 if v_0.Op != OpARM64MOVDconst { 2707 break 2708 } 2709 c := v_0.AuxInt 2710 v_1 := v.Args[1] 2711 if v_1.Op != OpARM64MOVDconst { 2712 break 2713 } 2714 d := v_1.AuxInt 2715 v.reset(OpARM64MOVDconst) 2716 
v.AuxInt = int64(int32(c) / int32(d)) 2717 return true 2718 } 2719 return false 2720 } 2721 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 2722 // match: (Equal (FlagEQ)) 2723 // cond: 2724 // result: (MOVDconst [1]) 2725 for { 2726 v_0 := v.Args[0] 2727 if v_0.Op != OpARM64FlagEQ { 2728 break 2729 } 2730 v.reset(OpARM64MOVDconst) 2731 v.AuxInt = 1 2732 return true 2733 } 2734 // match: (Equal (FlagLT_ULT)) 2735 // cond: 2736 // result: (MOVDconst [0]) 2737 for { 2738 v_0 := v.Args[0] 2739 if v_0.Op != OpARM64FlagLT_ULT { 2740 break 2741 } 2742 v.reset(OpARM64MOVDconst) 2743 v.AuxInt = 0 2744 return true 2745 } 2746 // match: (Equal (FlagLT_UGT)) 2747 // cond: 2748 // result: (MOVDconst [0]) 2749 for { 2750 v_0 := v.Args[0] 2751 if v_0.Op != OpARM64FlagLT_UGT { 2752 break 2753 } 2754 v.reset(OpARM64MOVDconst) 2755 v.AuxInt = 0 2756 return true 2757 } 2758 // match: (Equal (FlagGT_ULT)) 2759 // cond: 2760 // result: (MOVDconst [0]) 2761 for { 2762 v_0 := v.Args[0] 2763 if v_0.Op != OpARM64FlagGT_ULT { 2764 break 2765 } 2766 v.reset(OpARM64MOVDconst) 2767 v.AuxInt = 0 2768 return true 2769 } 2770 // match: (Equal (FlagGT_UGT)) 2771 // cond: 2772 // result: (MOVDconst [0]) 2773 for { 2774 v_0 := v.Args[0] 2775 if v_0.Op != OpARM64FlagGT_UGT { 2776 break 2777 } 2778 v.reset(OpARM64MOVDconst) 2779 v.AuxInt = 0 2780 return true 2781 } 2782 // match: (Equal (InvertFlags x)) 2783 // cond: 2784 // result: (Equal x) 2785 for { 2786 v_0 := v.Args[0] 2787 if v_0.Op != OpARM64InvertFlags { 2788 break 2789 } 2790 x := v_0.Args[0] 2791 v.reset(OpARM64Equal) 2792 v.AddArg(x) 2793 return true 2794 } 2795 return false 2796 } 2797 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 2798 b := v.Block 2799 _ = b 2800 config := b.Func.Config 2801 _ = config 2802 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 2803 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2804 // result: (FMOVDload [off1+off2] {sym} ptr mem) 2805 for { 2806 
off1 := v.AuxInt 2807 sym := v.Aux 2808 _ = v.Args[1] 2809 v_0 := v.Args[0] 2810 if v_0.Op != OpARM64ADDconst { 2811 break 2812 } 2813 off2 := v_0.AuxInt 2814 ptr := v_0.Args[0] 2815 mem := v.Args[1] 2816 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2817 break 2818 } 2819 v.reset(OpARM64FMOVDload) 2820 v.AuxInt = off1 + off2 2821 v.Aux = sym 2822 v.AddArg(ptr) 2823 v.AddArg(mem) 2824 return true 2825 } 2826 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2827 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2828 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2829 for { 2830 off1 := v.AuxInt 2831 sym1 := v.Aux 2832 _ = v.Args[1] 2833 v_0 := v.Args[0] 2834 if v_0.Op != OpARM64MOVDaddr { 2835 break 2836 } 2837 off2 := v_0.AuxInt 2838 sym2 := v_0.Aux 2839 ptr := v_0.Args[0] 2840 mem := v.Args[1] 2841 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2842 break 2843 } 2844 v.reset(OpARM64FMOVDload) 2845 v.AuxInt = off1 + off2 2846 v.Aux = mergeSym(sym1, sym2) 2847 v.AddArg(ptr) 2848 v.AddArg(mem) 2849 return true 2850 } 2851 return false 2852 } 2853 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 2854 b := v.Block 2855 _ = b 2856 config := b.Func.Config 2857 _ = config 2858 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2859 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2860 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 2861 for { 2862 off1 := v.AuxInt 2863 sym := v.Aux 2864 _ = v.Args[2] 2865 v_0 := v.Args[0] 2866 if v_0.Op != OpARM64ADDconst { 2867 break 2868 } 2869 off2 := v_0.AuxInt 2870 ptr := v_0.Args[0] 2871 val := v.Args[1] 2872 mem := v.Args[2] 2873 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2874 break 2875 } 2876 v.reset(OpARM64FMOVDstore) 2877 v.AuxInt = off1 + off2 2878 v.Aux = sym 2879 
v.AddArg(ptr) 2880 v.AddArg(val) 2881 v.AddArg(mem) 2882 return true 2883 } 2884 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2885 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2886 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2887 for { 2888 off1 := v.AuxInt 2889 sym1 := v.Aux 2890 _ = v.Args[2] 2891 v_0 := v.Args[0] 2892 if v_0.Op != OpARM64MOVDaddr { 2893 break 2894 } 2895 off2 := v_0.AuxInt 2896 sym2 := v_0.Aux 2897 ptr := v_0.Args[0] 2898 val := v.Args[1] 2899 mem := v.Args[2] 2900 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2901 break 2902 } 2903 v.reset(OpARM64FMOVDstore) 2904 v.AuxInt = off1 + off2 2905 v.Aux = mergeSym(sym1, sym2) 2906 v.AddArg(ptr) 2907 v.AddArg(val) 2908 v.AddArg(mem) 2909 return true 2910 } 2911 return false 2912 } 2913 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 2914 b := v.Block 2915 _ = b 2916 config := b.Func.Config 2917 _ = config 2918 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 2919 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2920 // result: (FMOVSload [off1+off2] {sym} ptr mem) 2921 for { 2922 off1 := v.AuxInt 2923 sym := v.Aux 2924 _ = v.Args[1] 2925 v_0 := v.Args[0] 2926 if v_0.Op != OpARM64ADDconst { 2927 break 2928 } 2929 off2 := v_0.AuxInt 2930 ptr := v_0.Args[0] 2931 mem := v.Args[1] 2932 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2933 break 2934 } 2935 v.reset(OpARM64FMOVSload) 2936 v.AuxInt = off1 + off2 2937 v.Aux = sym 2938 v.AddArg(ptr) 2939 v.AddArg(mem) 2940 return true 2941 } 2942 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2943 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2944 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2945 for { 2946 off1 := v.AuxInt 
2947 sym1 := v.Aux 2948 _ = v.Args[1] 2949 v_0 := v.Args[0] 2950 if v_0.Op != OpARM64MOVDaddr { 2951 break 2952 } 2953 off2 := v_0.AuxInt 2954 sym2 := v_0.Aux 2955 ptr := v_0.Args[0] 2956 mem := v.Args[1] 2957 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2958 break 2959 } 2960 v.reset(OpARM64FMOVSload) 2961 v.AuxInt = off1 + off2 2962 v.Aux = mergeSym(sym1, sym2) 2963 v.AddArg(ptr) 2964 v.AddArg(mem) 2965 return true 2966 } 2967 return false 2968 } 2969 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 2970 b := v.Block 2971 _ = b 2972 config := b.Func.Config 2973 _ = config 2974 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2975 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2976 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 2977 for { 2978 off1 := v.AuxInt 2979 sym := v.Aux 2980 _ = v.Args[2] 2981 v_0 := v.Args[0] 2982 if v_0.Op != OpARM64ADDconst { 2983 break 2984 } 2985 off2 := v_0.AuxInt 2986 ptr := v_0.Args[0] 2987 val := v.Args[1] 2988 mem := v.Args[2] 2989 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2990 break 2991 } 2992 v.reset(OpARM64FMOVSstore) 2993 v.AuxInt = off1 + off2 2994 v.Aux = sym 2995 v.AddArg(ptr) 2996 v.AddArg(val) 2997 v.AddArg(mem) 2998 return true 2999 } 3000 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3001 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3002 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3003 for { 3004 off1 := v.AuxInt 3005 sym1 := v.Aux 3006 _ = v.Args[2] 3007 v_0 := v.Args[0] 3008 if v_0.Op != OpARM64MOVDaddr { 3009 break 3010 } 3011 off2 := v_0.AuxInt 3012 sym2 := v_0.Aux 3013 ptr := v_0.Args[0] 3014 val := v.Args[1] 3015 mem := v.Args[2] 3016 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3017 break 3018 } 
3019 v.reset(OpARM64FMOVSstore) 3020 v.AuxInt = off1 + off2 3021 v.Aux = mergeSym(sym1, sym2) 3022 v.AddArg(ptr) 3023 v.AddArg(val) 3024 v.AddArg(mem) 3025 return true 3026 } 3027 return false 3028 } 3029 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 3030 // match: (GreaterEqual (FlagEQ)) 3031 // cond: 3032 // result: (MOVDconst [1]) 3033 for { 3034 v_0 := v.Args[0] 3035 if v_0.Op != OpARM64FlagEQ { 3036 break 3037 } 3038 v.reset(OpARM64MOVDconst) 3039 v.AuxInt = 1 3040 return true 3041 } 3042 // match: (GreaterEqual (FlagLT_ULT)) 3043 // cond: 3044 // result: (MOVDconst [0]) 3045 for { 3046 v_0 := v.Args[0] 3047 if v_0.Op != OpARM64FlagLT_ULT { 3048 break 3049 } 3050 v.reset(OpARM64MOVDconst) 3051 v.AuxInt = 0 3052 return true 3053 } 3054 // match: (GreaterEqual (FlagLT_UGT)) 3055 // cond: 3056 // result: (MOVDconst [0]) 3057 for { 3058 v_0 := v.Args[0] 3059 if v_0.Op != OpARM64FlagLT_UGT { 3060 break 3061 } 3062 v.reset(OpARM64MOVDconst) 3063 v.AuxInt = 0 3064 return true 3065 } 3066 // match: (GreaterEqual (FlagGT_ULT)) 3067 // cond: 3068 // result: (MOVDconst [1]) 3069 for { 3070 v_0 := v.Args[0] 3071 if v_0.Op != OpARM64FlagGT_ULT { 3072 break 3073 } 3074 v.reset(OpARM64MOVDconst) 3075 v.AuxInt = 1 3076 return true 3077 } 3078 // match: (GreaterEqual (FlagGT_UGT)) 3079 // cond: 3080 // result: (MOVDconst [1]) 3081 for { 3082 v_0 := v.Args[0] 3083 if v_0.Op != OpARM64FlagGT_UGT { 3084 break 3085 } 3086 v.reset(OpARM64MOVDconst) 3087 v.AuxInt = 1 3088 return true 3089 } 3090 // match: (GreaterEqual (InvertFlags x)) 3091 // cond: 3092 // result: (LessEqual x) 3093 for { 3094 v_0 := v.Args[0] 3095 if v_0.Op != OpARM64InvertFlags { 3096 break 3097 } 3098 x := v_0.Args[0] 3099 v.reset(OpARM64LessEqual) 3100 v.AddArg(x) 3101 return true 3102 } 3103 return false 3104 } 3105 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 3106 // match: (GreaterEqualU (FlagEQ)) 3107 // cond: 3108 // result: (MOVDconst [1]) 3109 for { 3110 v_0 := v.Args[0] 
3111 if v_0.Op != OpARM64FlagEQ { 3112 break 3113 } 3114 v.reset(OpARM64MOVDconst) 3115 v.AuxInt = 1 3116 return true 3117 } 3118 // match: (GreaterEqualU (FlagLT_ULT)) 3119 // cond: 3120 // result: (MOVDconst [0]) 3121 for { 3122 v_0 := v.Args[0] 3123 if v_0.Op != OpARM64FlagLT_ULT { 3124 break 3125 } 3126 v.reset(OpARM64MOVDconst) 3127 v.AuxInt = 0 3128 return true 3129 } 3130 // match: (GreaterEqualU (FlagLT_UGT)) 3131 // cond: 3132 // result: (MOVDconst [1]) 3133 for { 3134 v_0 := v.Args[0] 3135 if v_0.Op != OpARM64FlagLT_UGT { 3136 break 3137 } 3138 v.reset(OpARM64MOVDconst) 3139 v.AuxInt = 1 3140 return true 3141 } 3142 // match: (GreaterEqualU (FlagGT_ULT)) 3143 // cond: 3144 // result: (MOVDconst [0]) 3145 for { 3146 v_0 := v.Args[0] 3147 if v_0.Op != OpARM64FlagGT_ULT { 3148 break 3149 } 3150 v.reset(OpARM64MOVDconst) 3151 v.AuxInt = 0 3152 return true 3153 } 3154 // match: (GreaterEqualU (FlagGT_UGT)) 3155 // cond: 3156 // result: (MOVDconst [1]) 3157 for { 3158 v_0 := v.Args[0] 3159 if v_0.Op != OpARM64FlagGT_UGT { 3160 break 3161 } 3162 v.reset(OpARM64MOVDconst) 3163 v.AuxInt = 1 3164 return true 3165 } 3166 // match: (GreaterEqualU (InvertFlags x)) 3167 // cond: 3168 // result: (LessEqualU x) 3169 for { 3170 v_0 := v.Args[0] 3171 if v_0.Op != OpARM64InvertFlags { 3172 break 3173 } 3174 x := v_0.Args[0] 3175 v.reset(OpARM64LessEqualU) 3176 v.AddArg(x) 3177 return true 3178 } 3179 return false 3180 } 3181 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 3182 // match: (GreaterThan (FlagEQ)) 3183 // cond: 3184 // result: (MOVDconst [0]) 3185 for { 3186 v_0 := v.Args[0] 3187 if v_0.Op != OpARM64FlagEQ { 3188 break 3189 } 3190 v.reset(OpARM64MOVDconst) 3191 v.AuxInt = 0 3192 return true 3193 } 3194 // match: (GreaterThan (FlagLT_ULT)) 3195 // cond: 3196 // result: (MOVDconst [0]) 3197 for { 3198 v_0 := v.Args[0] 3199 if v_0.Op != OpARM64FlagLT_ULT { 3200 break 3201 } 3202 v.reset(OpARM64MOVDconst) 3203 v.AuxInt = 0 3204 return true 3205 } 3206 
// match: (GreaterThan (FlagLT_UGT)) 3207 // cond: 3208 // result: (MOVDconst [0]) 3209 for { 3210 v_0 := v.Args[0] 3211 if v_0.Op != OpARM64FlagLT_UGT { 3212 break 3213 } 3214 v.reset(OpARM64MOVDconst) 3215 v.AuxInt = 0 3216 return true 3217 } 3218 // match: (GreaterThan (FlagGT_ULT)) 3219 // cond: 3220 // result: (MOVDconst [1]) 3221 for { 3222 v_0 := v.Args[0] 3223 if v_0.Op != OpARM64FlagGT_ULT { 3224 break 3225 } 3226 v.reset(OpARM64MOVDconst) 3227 v.AuxInt = 1 3228 return true 3229 } 3230 // match: (GreaterThan (FlagGT_UGT)) 3231 // cond: 3232 // result: (MOVDconst [1]) 3233 for { 3234 v_0 := v.Args[0] 3235 if v_0.Op != OpARM64FlagGT_UGT { 3236 break 3237 } 3238 v.reset(OpARM64MOVDconst) 3239 v.AuxInt = 1 3240 return true 3241 } 3242 // match: (GreaterThan (InvertFlags x)) 3243 // cond: 3244 // result: (LessThan x) 3245 for { 3246 v_0 := v.Args[0] 3247 if v_0.Op != OpARM64InvertFlags { 3248 break 3249 } 3250 x := v_0.Args[0] 3251 v.reset(OpARM64LessThan) 3252 v.AddArg(x) 3253 return true 3254 } 3255 return false 3256 } 3257 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 3258 // match: (GreaterThanU (FlagEQ)) 3259 // cond: 3260 // result: (MOVDconst [0]) 3261 for { 3262 v_0 := v.Args[0] 3263 if v_0.Op != OpARM64FlagEQ { 3264 break 3265 } 3266 v.reset(OpARM64MOVDconst) 3267 v.AuxInt = 0 3268 return true 3269 } 3270 // match: (GreaterThanU (FlagLT_ULT)) 3271 // cond: 3272 // result: (MOVDconst [0]) 3273 for { 3274 v_0 := v.Args[0] 3275 if v_0.Op != OpARM64FlagLT_ULT { 3276 break 3277 } 3278 v.reset(OpARM64MOVDconst) 3279 v.AuxInt = 0 3280 return true 3281 } 3282 // match: (GreaterThanU (FlagLT_UGT)) 3283 // cond: 3284 // result: (MOVDconst [1]) 3285 for { 3286 v_0 := v.Args[0] 3287 if v_0.Op != OpARM64FlagLT_UGT { 3288 break 3289 } 3290 v.reset(OpARM64MOVDconst) 3291 v.AuxInt = 1 3292 return true 3293 } 3294 // match: (GreaterThanU (FlagGT_ULT)) 3295 // cond: 3296 // result: (MOVDconst [0]) 3297 for { 3298 v_0 := v.Args[0] 3299 if v_0.Op != 
OpARM64FlagGT_ULT { 3300 break 3301 } 3302 v.reset(OpARM64MOVDconst) 3303 v.AuxInt = 0 3304 return true 3305 } 3306 // match: (GreaterThanU (FlagGT_UGT)) 3307 // cond: 3308 // result: (MOVDconst [1]) 3309 for { 3310 v_0 := v.Args[0] 3311 if v_0.Op != OpARM64FlagGT_UGT { 3312 break 3313 } 3314 v.reset(OpARM64MOVDconst) 3315 v.AuxInt = 1 3316 return true 3317 } 3318 // match: (GreaterThanU (InvertFlags x)) 3319 // cond: 3320 // result: (LessThanU x) 3321 for { 3322 v_0 := v.Args[0] 3323 if v_0.Op != OpARM64InvertFlags { 3324 break 3325 } 3326 x := v_0.Args[0] 3327 v.reset(OpARM64LessThanU) 3328 v.AddArg(x) 3329 return true 3330 } 3331 return false 3332 } 3333 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 3334 // match: (LessEqual (FlagEQ)) 3335 // cond: 3336 // result: (MOVDconst [1]) 3337 for { 3338 v_0 := v.Args[0] 3339 if v_0.Op != OpARM64FlagEQ { 3340 break 3341 } 3342 v.reset(OpARM64MOVDconst) 3343 v.AuxInt = 1 3344 return true 3345 } 3346 // match: (LessEqual (FlagLT_ULT)) 3347 // cond: 3348 // result: (MOVDconst [1]) 3349 for { 3350 v_0 := v.Args[0] 3351 if v_0.Op != OpARM64FlagLT_ULT { 3352 break 3353 } 3354 v.reset(OpARM64MOVDconst) 3355 v.AuxInt = 1 3356 return true 3357 } 3358 // match: (LessEqual (FlagLT_UGT)) 3359 // cond: 3360 // result: (MOVDconst [1]) 3361 for { 3362 v_0 := v.Args[0] 3363 if v_0.Op != OpARM64FlagLT_UGT { 3364 break 3365 } 3366 v.reset(OpARM64MOVDconst) 3367 v.AuxInt = 1 3368 return true 3369 } 3370 // match: (LessEqual (FlagGT_ULT)) 3371 // cond: 3372 // result: (MOVDconst [0]) 3373 for { 3374 v_0 := v.Args[0] 3375 if v_0.Op != OpARM64FlagGT_ULT { 3376 break 3377 } 3378 v.reset(OpARM64MOVDconst) 3379 v.AuxInt = 0 3380 return true 3381 } 3382 // match: (LessEqual (FlagGT_UGT)) 3383 // cond: 3384 // result: (MOVDconst [0]) 3385 for { 3386 v_0 := v.Args[0] 3387 if v_0.Op != OpARM64FlagGT_UGT { 3388 break 3389 } 3390 v.reset(OpARM64MOVDconst) 3391 v.AuxInt = 0 3392 return true 3393 } 3394 // match: (LessEqual (InvertFlags x)) 
3395 // cond: 3396 // result: (GreaterEqual x) 3397 for { 3398 v_0 := v.Args[0] 3399 if v_0.Op != OpARM64InvertFlags { 3400 break 3401 } 3402 x := v_0.Args[0] 3403 v.reset(OpARM64GreaterEqual) 3404 v.AddArg(x) 3405 return true 3406 } 3407 return false 3408 } 3409 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool { 3410 // match: (LessEqualU (FlagEQ)) 3411 // cond: 3412 // result: (MOVDconst [1]) 3413 for { 3414 v_0 := v.Args[0] 3415 if v_0.Op != OpARM64FlagEQ { 3416 break 3417 } 3418 v.reset(OpARM64MOVDconst) 3419 v.AuxInt = 1 3420 return true 3421 } 3422 // match: (LessEqualU (FlagLT_ULT)) 3423 // cond: 3424 // result: (MOVDconst [1]) 3425 for { 3426 v_0 := v.Args[0] 3427 if v_0.Op != OpARM64FlagLT_ULT { 3428 break 3429 } 3430 v.reset(OpARM64MOVDconst) 3431 v.AuxInt = 1 3432 return true 3433 } 3434 // match: (LessEqualU (FlagLT_UGT)) 3435 // cond: 3436 // result: (MOVDconst [0]) 3437 for { 3438 v_0 := v.Args[0] 3439 if v_0.Op != OpARM64FlagLT_UGT { 3440 break 3441 } 3442 v.reset(OpARM64MOVDconst) 3443 v.AuxInt = 0 3444 return true 3445 } 3446 // match: (LessEqualU (FlagGT_ULT)) 3447 // cond: 3448 // result: (MOVDconst [1]) 3449 for { 3450 v_0 := v.Args[0] 3451 if v_0.Op != OpARM64FlagGT_ULT { 3452 break 3453 } 3454 v.reset(OpARM64MOVDconst) 3455 v.AuxInt = 1 3456 return true 3457 } 3458 // match: (LessEqualU (FlagGT_UGT)) 3459 // cond: 3460 // result: (MOVDconst [0]) 3461 for { 3462 v_0 := v.Args[0] 3463 if v_0.Op != OpARM64FlagGT_UGT { 3464 break 3465 } 3466 v.reset(OpARM64MOVDconst) 3467 v.AuxInt = 0 3468 return true 3469 } 3470 // match: (LessEqualU (InvertFlags x)) 3471 // cond: 3472 // result: (GreaterEqualU x) 3473 for { 3474 v_0 := v.Args[0] 3475 if v_0.Op != OpARM64InvertFlags { 3476 break 3477 } 3478 x := v_0.Args[0] 3479 v.reset(OpARM64GreaterEqualU) 3480 v.AddArg(x) 3481 return true 3482 } 3483 return false 3484 } 3485 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool { 3486 // match: (LessThan (FlagEQ)) 3487 // cond: 3488 // result: 
(MOVDconst [0]) 3489 for { 3490 v_0 := v.Args[0] 3491 if v_0.Op != OpARM64FlagEQ { 3492 break 3493 } 3494 v.reset(OpARM64MOVDconst) 3495 v.AuxInt = 0 3496 return true 3497 } 3498 // match: (LessThan (FlagLT_ULT)) 3499 // cond: 3500 // result: (MOVDconst [1]) 3501 for { 3502 v_0 := v.Args[0] 3503 if v_0.Op != OpARM64FlagLT_ULT { 3504 break 3505 } 3506 v.reset(OpARM64MOVDconst) 3507 v.AuxInt = 1 3508 return true 3509 } 3510 // match: (LessThan (FlagLT_UGT)) 3511 // cond: 3512 // result: (MOVDconst [1]) 3513 for { 3514 v_0 := v.Args[0] 3515 if v_0.Op != OpARM64FlagLT_UGT { 3516 break 3517 } 3518 v.reset(OpARM64MOVDconst) 3519 v.AuxInt = 1 3520 return true 3521 } 3522 // match: (LessThan (FlagGT_ULT)) 3523 // cond: 3524 // result: (MOVDconst [0]) 3525 for { 3526 v_0 := v.Args[0] 3527 if v_0.Op != OpARM64FlagGT_ULT { 3528 break 3529 } 3530 v.reset(OpARM64MOVDconst) 3531 v.AuxInt = 0 3532 return true 3533 } 3534 // match: (LessThan (FlagGT_UGT)) 3535 // cond: 3536 // result: (MOVDconst [0]) 3537 for { 3538 v_0 := v.Args[0] 3539 if v_0.Op != OpARM64FlagGT_UGT { 3540 break 3541 } 3542 v.reset(OpARM64MOVDconst) 3543 v.AuxInt = 0 3544 return true 3545 } 3546 // match: (LessThan (InvertFlags x)) 3547 // cond: 3548 // result: (GreaterThan x) 3549 for { 3550 v_0 := v.Args[0] 3551 if v_0.Op != OpARM64InvertFlags { 3552 break 3553 } 3554 x := v_0.Args[0] 3555 v.reset(OpARM64GreaterThan) 3556 v.AddArg(x) 3557 return true 3558 } 3559 return false 3560 } 3561 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool { 3562 // match: (LessThanU (FlagEQ)) 3563 // cond: 3564 // result: (MOVDconst [0]) 3565 for { 3566 v_0 := v.Args[0] 3567 if v_0.Op != OpARM64FlagEQ { 3568 break 3569 } 3570 v.reset(OpARM64MOVDconst) 3571 v.AuxInt = 0 3572 return true 3573 } 3574 // match: (LessThanU (FlagLT_ULT)) 3575 // cond: 3576 // result: (MOVDconst [1]) 3577 for { 3578 v_0 := v.Args[0] 3579 if v_0.Op != OpARM64FlagLT_ULT { 3580 break 3581 } 3582 v.reset(OpARM64MOVDconst) 3583 v.AuxInt = 1 3584 
return true
	}
	// match: (LessThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// cond:
	// result: (GreaterThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOD_0 constant-folds a signed 64-bit remainder when
// both operands are MOVDconst.
//
// NOTE(review): this file is generated from gen/ARM64.rules; the d != 0 guard
// added below must also be applied to the rule there ("&& d != 0") and the
// file regenerated, or the next regeneration will drop the fix.
func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(c)%int64(d)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Folding without this guard takes % by zero at compile time and
		// panics the compiler; a zero divisor can reach here from code SSA
		// has not yet proven unreachable.
		if d == 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MODW_0 constant-folds a signed 32-bit remainder
// when both operands are MOVDconst. See the regeneration note on
// rewriteValueARM64_OpARM64MOD_0.
func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: int32(d) != 0
	// result: (MOVDconst [int64(int32(c)%int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Guard the truncated divisor: the fold computes int32(c) % int32(d).
		if int32(d) == 0 {
			break
		}
v.reset(OpARM64MOVDconst) 3676 v.AuxInt = int64(int32(c) % int32(d)) 3677 return true 3678 } 3679 return false 3680 } 3681 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool { 3682 b := v.Block 3683 _ = b 3684 config := b.Func.Config 3685 _ = config 3686 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 3687 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3688 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3689 for { 3690 off1 := v.AuxInt 3691 sym := v.Aux 3692 _ = v.Args[1] 3693 v_0 := v.Args[0] 3694 if v_0.Op != OpARM64ADDconst { 3695 break 3696 } 3697 off2 := v_0.AuxInt 3698 ptr := v_0.Args[0] 3699 mem := v.Args[1] 3700 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3701 break 3702 } 3703 v.reset(OpARM64MOVBUload) 3704 v.AuxInt = off1 + off2 3705 v.Aux = sym 3706 v.AddArg(ptr) 3707 v.AddArg(mem) 3708 return true 3709 } 3710 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3711 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3712 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3713 for { 3714 off1 := v.AuxInt 3715 sym1 := v.Aux 3716 _ = v.Args[1] 3717 v_0 := v.Args[0] 3718 if v_0.Op != OpARM64MOVDaddr { 3719 break 3720 } 3721 off2 := v_0.AuxInt 3722 sym2 := v_0.Aux 3723 ptr := v_0.Args[0] 3724 mem := v.Args[1] 3725 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3726 break 3727 } 3728 v.reset(OpARM64MOVBUload) 3729 v.AuxInt = off1 + off2 3730 v.Aux = mergeSym(sym1, sym2) 3731 v.AddArg(ptr) 3732 v.AddArg(mem) 3733 return true 3734 } 3735 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3736 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3737 // result: (MOVDconst [0]) 3738 for { 3739 off := v.AuxInt 3740 sym := v.Aux 3741 _ = v.Args[1] 3742 ptr := v.Args[0] 3743 v_1 := v.Args[1] 3744 if v_1.Op != 
OpARM64MOVBstorezero { 3745 break 3746 } 3747 off2 := v_1.AuxInt 3748 sym2 := v_1.Aux 3749 _ = v_1.Args[1] 3750 ptr2 := v_1.Args[0] 3751 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3752 break 3753 } 3754 v.reset(OpARM64MOVDconst) 3755 v.AuxInt = 0 3756 return true 3757 } 3758 return false 3759 } 3760 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool { 3761 // match: (MOVBUreg x:(MOVBUload _ _)) 3762 // cond: 3763 // result: (MOVDreg x) 3764 for { 3765 x := v.Args[0] 3766 if x.Op != OpARM64MOVBUload { 3767 break 3768 } 3769 _ = x.Args[1] 3770 v.reset(OpARM64MOVDreg) 3771 v.AddArg(x) 3772 return true 3773 } 3774 // match: (MOVBUreg x:(MOVBUreg _)) 3775 // cond: 3776 // result: (MOVDreg x) 3777 for { 3778 x := v.Args[0] 3779 if x.Op != OpARM64MOVBUreg { 3780 break 3781 } 3782 v.reset(OpARM64MOVDreg) 3783 v.AddArg(x) 3784 return true 3785 } 3786 // match: (MOVBUreg (MOVDconst [c])) 3787 // cond: 3788 // result: (MOVDconst [int64(uint8(c))]) 3789 for { 3790 v_0 := v.Args[0] 3791 if v_0.Op != OpARM64MOVDconst { 3792 break 3793 } 3794 c := v_0.AuxInt 3795 v.reset(OpARM64MOVDconst) 3796 v.AuxInt = int64(uint8(c)) 3797 return true 3798 } 3799 // match: (MOVBUreg x) 3800 // cond: x.Type.IsBoolean() 3801 // result: (MOVDreg x) 3802 for { 3803 x := v.Args[0] 3804 if !(x.Type.IsBoolean()) { 3805 break 3806 } 3807 v.reset(OpARM64MOVDreg) 3808 v.AddArg(x) 3809 return true 3810 } 3811 return false 3812 } 3813 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool { 3814 b := v.Block 3815 _ = b 3816 config := b.Func.Config 3817 _ = config 3818 // match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem) 3819 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3820 // result: (MOVBload [off1+off2] {sym} ptr mem) 3821 for { 3822 off1 := v.AuxInt 3823 sym := v.Aux 3824 _ = v.Args[1] 3825 v_0 := v.Args[0] 3826 if v_0.Op != OpARM64ADDconst { 3827 break 3828 } 3829 off2 := v_0.AuxInt 3830 ptr := v_0.Args[0] 3831 mem := v.Args[1] 3832 if 
!(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3833 break 3834 } 3835 v.reset(OpARM64MOVBload) 3836 v.AuxInt = off1 + off2 3837 v.Aux = sym 3838 v.AddArg(ptr) 3839 v.AddArg(mem) 3840 return true 3841 } 3842 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3843 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3844 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3845 for { 3846 off1 := v.AuxInt 3847 sym1 := v.Aux 3848 _ = v.Args[1] 3849 v_0 := v.Args[0] 3850 if v_0.Op != OpARM64MOVDaddr { 3851 break 3852 } 3853 off2 := v_0.AuxInt 3854 sym2 := v_0.Aux 3855 ptr := v_0.Args[0] 3856 mem := v.Args[1] 3857 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3858 break 3859 } 3860 v.reset(OpARM64MOVBload) 3861 v.AuxInt = off1 + off2 3862 v.Aux = mergeSym(sym1, sym2) 3863 v.AddArg(ptr) 3864 v.AddArg(mem) 3865 return true 3866 } 3867 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3868 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3869 // result: (MOVDconst [0]) 3870 for { 3871 off := v.AuxInt 3872 sym := v.Aux 3873 _ = v.Args[1] 3874 ptr := v.Args[0] 3875 v_1 := v.Args[1] 3876 if v_1.Op != OpARM64MOVBstorezero { 3877 break 3878 } 3879 off2 := v_1.AuxInt 3880 sym2 := v_1.Aux 3881 _ = v_1.Args[1] 3882 ptr2 := v_1.Args[0] 3883 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3884 break 3885 } 3886 v.reset(OpARM64MOVDconst) 3887 v.AuxInt = 0 3888 return true 3889 } 3890 return false 3891 } 3892 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool { 3893 // match: (MOVBreg x:(MOVBload _ _)) 3894 // cond: 3895 // result: (MOVDreg x) 3896 for { 3897 x := v.Args[0] 3898 if x.Op != OpARM64MOVBload { 3899 break 3900 } 3901 _ = x.Args[1] 3902 v.reset(OpARM64MOVDreg) 3903 v.AddArg(x) 3904 return true 3905 } 3906 // match: (MOVBreg x:(MOVBreg _)) 3907 // cond: 3908 // result: 
(MOVDreg x) 3909 for { 3910 x := v.Args[0] 3911 if x.Op != OpARM64MOVBreg { 3912 break 3913 } 3914 v.reset(OpARM64MOVDreg) 3915 v.AddArg(x) 3916 return true 3917 } 3918 // match: (MOVBreg (MOVDconst [c])) 3919 // cond: 3920 // result: (MOVDconst [int64(int8(c))]) 3921 for { 3922 v_0 := v.Args[0] 3923 if v_0.Op != OpARM64MOVDconst { 3924 break 3925 } 3926 c := v_0.AuxInt 3927 v.reset(OpARM64MOVDconst) 3928 v.AuxInt = int64(int8(c)) 3929 return true 3930 } 3931 return false 3932 } 3933 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool { 3934 b := v.Block 3935 _ = b 3936 config := b.Func.Config 3937 _ = config 3938 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3939 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3940 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3941 for { 3942 off1 := v.AuxInt 3943 sym := v.Aux 3944 _ = v.Args[2] 3945 v_0 := v.Args[0] 3946 if v_0.Op != OpARM64ADDconst { 3947 break 3948 } 3949 off2 := v_0.AuxInt 3950 ptr := v_0.Args[0] 3951 val := v.Args[1] 3952 mem := v.Args[2] 3953 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3954 break 3955 } 3956 v.reset(OpARM64MOVBstore) 3957 v.AuxInt = off1 + off2 3958 v.Aux = sym 3959 v.AddArg(ptr) 3960 v.AddArg(val) 3961 v.AddArg(mem) 3962 return true 3963 } 3964 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3965 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3966 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3967 for { 3968 off1 := v.AuxInt 3969 sym1 := v.Aux 3970 _ = v.Args[2] 3971 v_0 := v.Args[0] 3972 if v_0.Op != OpARM64MOVDaddr { 3973 break 3974 } 3975 off2 := v_0.AuxInt 3976 sym2 := v_0.Aux 3977 ptr := v_0.Args[0] 3978 val := v.Args[1] 3979 mem := v.Args[2] 3980 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3981 break 3982 } 3983 v.reset(OpARM64MOVBstore) 
3984 v.AuxInt = off1 + off2 3985 v.Aux = mergeSym(sym1, sym2) 3986 v.AddArg(ptr) 3987 v.AddArg(val) 3988 v.AddArg(mem) 3989 return true 3990 } 3991 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 3992 // cond: 3993 // result: (MOVBstorezero [off] {sym} ptr mem) 3994 for { 3995 off := v.AuxInt 3996 sym := v.Aux 3997 _ = v.Args[2] 3998 ptr := v.Args[0] 3999 v_1 := v.Args[1] 4000 if v_1.Op != OpARM64MOVDconst { 4001 break 4002 } 4003 if v_1.AuxInt != 0 { 4004 break 4005 } 4006 mem := v.Args[2] 4007 v.reset(OpARM64MOVBstorezero) 4008 v.AuxInt = off 4009 v.Aux = sym 4010 v.AddArg(ptr) 4011 v.AddArg(mem) 4012 return true 4013 } 4014 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 4015 // cond: 4016 // result: (MOVBstore [off] {sym} ptr x mem) 4017 for { 4018 off := v.AuxInt 4019 sym := v.Aux 4020 _ = v.Args[2] 4021 ptr := v.Args[0] 4022 v_1 := v.Args[1] 4023 if v_1.Op != OpARM64MOVBreg { 4024 break 4025 } 4026 x := v_1.Args[0] 4027 mem := v.Args[2] 4028 v.reset(OpARM64MOVBstore) 4029 v.AuxInt = off 4030 v.Aux = sym 4031 v.AddArg(ptr) 4032 v.AddArg(x) 4033 v.AddArg(mem) 4034 return true 4035 } 4036 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 4037 // cond: 4038 // result: (MOVBstore [off] {sym} ptr x mem) 4039 for { 4040 off := v.AuxInt 4041 sym := v.Aux 4042 _ = v.Args[2] 4043 ptr := v.Args[0] 4044 v_1 := v.Args[1] 4045 if v_1.Op != OpARM64MOVBUreg { 4046 break 4047 } 4048 x := v_1.Args[0] 4049 mem := v.Args[2] 4050 v.reset(OpARM64MOVBstore) 4051 v.AuxInt = off 4052 v.Aux = sym 4053 v.AddArg(ptr) 4054 v.AddArg(x) 4055 v.AddArg(mem) 4056 return true 4057 } 4058 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 4059 // cond: 4060 // result: (MOVBstore [off] {sym} ptr x mem) 4061 for { 4062 off := v.AuxInt 4063 sym := v.Aux 4064 _ = v.Args[2] 4065 ptr := v.Args[0] 4066 v_1 := v.Args[1] 4067 if v_1.Op != OpARM64MOVHreg { 4068 break 4069 } 4070 x := v_1.Args[0] 4071 mem := v.Args[2] 4072 v.reset(OpARM64MOVBstore) 4073 v.AuxInt = off 4074 v.Aux = 
sym 4075 v.AddArg(ptr) 4076 v.AddArg(x) 4077 v.AddArg(mem) 4078 return true 4079 } 4080 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 4081 // cond: 4082 // result: (MOVBstore [off] {sym} ptr x mem) 4083 for { 4084 off := v.AuxInt 4085 sym := v.Aux 4086 _ = v.Args[2] 4087 ptr := v.Args[0] 4088 v_1 := v.Args[1] 4089 if v_1.Op != OpARM64MOVHUreg { 4090 break 4091 } 4092 x := v_1.Args[0] 4093 mem := v.Args[2] 4094 v.reset(OpARM64MOVBstore) 4095 v.AuxInt = off 4096 v.Aux = sym 4097 v.AddArg(ptr) 4098 v.AddArg(x) 4099 v.AddArg(mem) 4100 return true 4101 } 4102 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 4103 // cond: 4104 // result: (MOVBstore [off] {sym} ptr x mem) 4105 for { 4106 off := v.AuxInt 4107 sym := v.Aux 4108 _ = v.Args[2] 4109 ptr := v.Args[0] 4110 v_1 := v.Args[1] 4111 if v_1.Op != OpARM64MOVWreg { 4112 break 4113 } 4114 x := v_1.Args[0] 4115 mem := v.Args[2] 4116 v.reset(OpARM64MOVBstore) 4117 v.AuxInt = off 4118 v.Aux = sym 4119 v.AddArg(ptr) 4120 v.AddArg(x) 4121 v.AddArg(mem) 4122 return true 4123 } 4124 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 4125 // cond: 4126 // result: (MOVBstore [off] {sym} ptr x mem) 4127 for { 4128 off := v.AuxInt 4129 sym := v.Aux 4130 _ = v.Args[2] 4131 ptr := v.Args[0] 4132 v_1 := v.Args[1] 4133 if v_1.Op != OpARM64MOVWUreg { 4134 break 4135 } 4136 x := v_1.Args[0] 4137 mem := v.Args[2] 4138 v.reset(OpARM64MOVBstore) 4139 v.AuxInt = off 4140 v.Aux = sym 4141 v.AddArg(ptr) 4142 v.AddArg(x) 4143 v.AddArg(mem) 4144 return true 4145 } 4146 return false 4147 } 4148 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 4149 b := v.Block 4150 _ = b 4151 config := b.Func.Config 4152 _ = config 4153 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4154 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4155 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 4156 for { 4157 off1 := v.AuxInt 4158 sym := v.Aux 4159 _ = v.Args[1] 4160 v_0 := v.Args[0] 4161 
if v_0.Op != OpARM64ADDconst { 4162 break 4163 } 4164 off2 := v_0.AuxInt 4165 ptr := v_0.Args[0] 4166 mem := v.Args[1] 4167 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4168 break 4169 } 4170 v.reset(OpARM64MOVBstorezero) 4171 v.AuxInt = off1 + off2 4172 v.Aux = sym 4173 v.AddArg(ptr) 4174 v.AddArg(mem) 4175 return true 4176 } 4177 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4178 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4179 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4180 for { 4181 off1 := v.AuxInt 4182 sym1 := v.Aux 4183 _ = v.Args[1] 4184 v_0 := v.Args[0] 4185 if v_0.Op != OpARM64MOVDaddr { 4186 break 4187 } 4188 off2 := v_0.AuxInt 4189 sym2 := v_0.Aux 4190 ptr := v_0.Args[0] 4191 mem := v.Args[1] 4192 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4193 break 4194 } 4195 v.reset(OpARM64MOVBstorezero) 4196 v.AuxInt = off1 + off2 4197 v.Aux = mergeSym(sym1, sym2) 4198 v.AddArg(ptr) 4199 v.AddArg(mem) 4200 return true 4201 } 4202 return false 4203 } 4204 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool { 4205 b := v.Block 4206 _ = b 4207 config := b.Func.Config 4208 _ = config 4209 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 4210 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4211 // result: (MOVDload [off1+off2] {sym} ptr mem) 4212 for { 4213 off1 := v.AuxInt 4214 sym := v.Aux 4215 _ = v.Args[1] 4216 v_0 := v.Args[0] 4217 if v_0.Op != OpARM64ADDconst { 4218 break 4219 } 4220 off2 := v_0.AuxInt 4221 ptr := v_0.Args[0] 4222 mem := v.Args[1] 4223 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4224 break 4225 } 4226 v.reset(OpARM64MOVDload) 4227 v.AuxInt = off1 + off2 4228 v.Aux = sym 4229 v.AddArg(ptr) 4230 v.AddArg(mem) 4231 return true 4232 } 4233 // match: (MOVDload [off1] {sym1} 
(MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDreg_0 simplifies MOVDreg register copies:
// a single-use copy is turned into a MOVDnop, and a copy of a constant
// folds into the constant itself.
func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
	b
:= v.Block 4313 _ = b 4314 config := b.Func.Config 4315 _ = config 4316 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4317 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4318 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4319 for { 4320 off1 := v.AuxInt 4321 sym := v.Aux 4322 _ = v.Args[2] 4323 v_0 := v.Args[0] 4324 if v_0.Op != OpARM64ADDconst { 4325 break 4326 } 4327 off2 := v_0.AuxInt 4328 ptr := v_0.Args[0] 4329 val := v.Args[1] 4330 mem := v.Args[2] 4331 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4332 break 4333 } 4334 v.reset(OpARM64MOVDstore) 4335 v.AuxInt = off1 + off2 4336 v.Aux = sym 4337 v.AddArg(ptr) 4338 v.AddArg(val) 4339 v.AddArg(mem) 4340 return true 4341 } 4342 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4343 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4344 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4345 for { 4346 off1 := v.AuxInt 4347 sym1 := v.Aux 4348 _ = v.Args[2] 4349 v_0 := v.Args[0] 4350 if v_0.Op != OpARM64MOVDaddr { 4351 break 4352 } 4353 off2 := v_0.AuxInt 4354 sym2 := v_0.Aux 4355 ptr := v_0.Args[0] 4356 val := v.Args[1] 4357 mem := v.Args[2] 4358 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4359 break 4360 } 4361 v.reset(OpARM64MOVDstore) 4362 v.AuxInt = off1 + off2 4363 v.Aux = mergeSym(sym1, sym2) 4364 v.AddArg(ptr) 4365 v.AddArg(val) 4366 v.AddArg(mem) 4367 return true 4368 } 4369 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 4370 // cond: 4371 // result: (MOVDstorezero [off] {sym} ptr mem) 4372 for { 4373 off := v.AuxInt 4374 sym := v.Aux 4375 _ = v.Args[2] 4376 ptr := v.Args[0] 4377 v_1 := v.Args[1] 4378 if v_1.Op != OpARM64MOVDconst { 4379 break 4380 } 4381 if v_1.AuxInt != 0 { 4382 break 4383 } 4384 mem := v.Args[2] 4385 v.reset(OpARM64MOVDstorezero) 4386 v.AuxInt 
= off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezero_0 folds constant address offsets
// (ADDconst) and static symbol addresses (MOVDaddr) into the store-zero's
// own offset/symbol, as long as the combined offset fits in 32 bits and
// the SB-relative restriction for shared linkage is respected.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB ||
!config.ctxt.Flag_shared) 4457 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4458 for { 4459 off1 := v.AuxInt 4460 sym := v.Aux 4461 _ = v.Args[1] 4462 v_0 := v.Args[0] 4463 if v_0.Op != OpARM64ADDconst { 4464 break 4465 } 4466 off2 := v_0.AuxInt 4467 ptr := v_0.Args[0] 4468 mem := v.Args[1] 4469 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4470 break 4471 } 4472 v.reset(OpARM64MOVHUload) 4473 v.AuxInt = off1 + off2 4474 v.Aux = sym 4475 v.AddArg(ptr) 4476 v.AddArg(mem) 4477 return true 4478 } 4479 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4480 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4481 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4482 for { 4483 off1 := v.AuxInt 4484 sym1 := v.Aux 4485 _ = v.Args[1] 4486 v_0 := v.Args[0] 4487 if v_0.Op != OpARM64MOVDaddr { 4488 break 4489 } 4490 off2 := v_0.AuxInt 4491 sym2 := v_0.Aux 4492 ptr := v_0.Args[0] 4493 mem := v.Args[1] 4494 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4495 break 4496 } 4497 v.reset(OpARM64MOVHUload) 4498 v.AuxInt = off1 + off2 4499 v.Aux = mergeSym(sym1, sym2) 4500 v.AddArg(ptr) 4501 v.AddArg(mem) 4502 return true 4503 } 4504 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4505 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4506 // result: (MOVDconst [0]) 4507 for { 4508 off := v.AuxInt 4509 sym := v.Aux 4510 _ = v.Args[1] 4511 ptr := v.Args[0] 4512 v_1 := v.Args[1] 4513 if v_1.Op != OpARM64MOVHstorezero { 4514 break 4515 } 4516 off2 := v_1.AuxInt 4517 sym2 := v_1.Aux 4518 _ = v_1.Args[1] 4519 ptr2 := v_1.Args[0] 4520 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4521 break 4522 } 4523 v.reset(OpARM64MOVDconst) 4524 v.AuxInt = 0 4525 return true 4526 } 4527 return false 4528 } 4529 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool { 4530 // match: 
(MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHload_0 folds constant address offsets and
// static symbol addresses into the signed halfword load, and replaces a
// load that reads back a just-zeroed halfword with the constant zero.
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt =
off1 + off2 4619 v.Aux = sym 4620 v.AddArg(ptr) 4621 v.AddArg(mem) 4622 return true 4623 } 4624 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4625 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4626 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4627 for { 4628 off1 := v.AuxInt 4629 sym1 := v.Aux 4630 _ = v.Args[1] 4631 v_0 := v.Args[0] 4632 if v_0.Op != OpARM64MOVDaddr { 4633 break 4634 } 4635 off2 := v_0.AuxInt 4636 sym2 := v_0.Aux 4637 ptr := v_0.Args[0] 4638 mem := v.Args[1] 4639 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4640 break 4641 } 4642 v.reset(OpARM64MOVHload) 4643 v.AuxInt = off1 + off2 4644 v.Aux = mergeSym(sym1, sym2) 4645 v.AddArg(ptr) 4646 v.AddArg(mem) 4647 return true 4648 } 4649 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 4650 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4651 // result: (MOVDconst [0]) 4652 for { 4653 off := v.AuxInt 4654 sym := v.Aux 4655 _ = v.Args[1] 4656 ptr := v.Args[0] 4657 v_1 := v.Args[1] 4658 if v_1.Op != OpARM64MOVHstorezero { 4659 break 4660 } 4661 off2 := v_1.AuxInt 4662 sym2 := v_1.Aux 4663 _ = v_1.Args[1] 4664 ptr2 := v_1.Args[0] 4665 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4666 break 4667 } 4668 v.reset(OpARM64MOVDconst) 4669 v.AuxInt = 0 4670 return true 4671 } 4672 return false 4673 } 4674 func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool { 4675 // match: (MOVHreg x:(MOVBload _ _)) 4676 // cond: 4677 // result: (MOVDreg x) 4678 for { 4679 x := v.Args[0] 4680 if x.Op != OpARM64MOVBload { 4681 break 4682 } 4683 _ = x.Args[1] 4684 v.reset(OpARM64MOVDreg) 4685 v.AddArg(x) 4686 return true 4687 } 4688 // match: (MOVHreg x:(MOVBUload _ _)) 4689 // cond: 4690 // result: (MOVDreg x) 4691 for { 4692 x := v.Args[0] 4693 if x.Op != OpARM64MOVBUload { 4694 break 4695 } 4696 _ = x.Args[1] 4697 
v.reset(OpARM64MOVDreg) 4698 v.AddArg(x) 4699 return true 4700 } 4701 // match: (MOVHreg x:(MOVHload _ _)) 4702 // cond: 4703 // result: (MOVDreg x) 4704 for { 4705 x := v.Args[0] 4706 if x.Op != OpARM64MOVHload { 4707 break 4708 } 4709 _ = x.Args[1] 4710 v.reset(OpARM64MOVDreg) 4711 v.AddArg(x) 4712 return true 4713 } 4714 // match: (MOVHreg x:(MOVBreg _)) 4715 // cond: 4716 // result: (MOVDreg x) 4717 for { 4718 x := v.Args[0] 4719 if x.Op != OpARM64MOVBreg { 4720 break 4721 } 4722 v.reset(OpARM64MOVDreg) 4723 v.AddArg(x) 4724 return true 4725 } 4726 // match: (MOVHreg x:(MOVBUreg _)) 4727 // cond: 4728 // result: (MOVDreg x) 4729 for { 4730 x := v.Args[0] 4731 if x.Op != OpARM64MOVBUreg { 4732 break 4733 } 4734 v.reset(OpARM64MOVDreg) 4735 v.AddArg(x) 4736 return true 4737 } 4738 // match: (MOVHreg x:(MOVHreg _)) 4739 // cond: 4740 // result: (MOVDreg x) 4741 for { 4742 x := v.Args[0] 4743 if x.Op != OpARM64MOVHreg { 4744 break 4745 } 4746 v.reset(OpARM64MOVDreg) 4747 v.AddArg(x) 4748 return true 4749 } 4750 // match: (MOVHreg (MOVDconst [c])) 4751 // cond: 4752 // result: (MOVDconst [int64(int16(c))]) 4753 for { 4754 v_0 := v.Args[0] 4755 if v_0.Op != OpARM64MOVDconst { 4756 break 4757 } 4758 c := v_0.AuxInt 4759 v.reset(OpARM64MOVDconst) 4760 v.AuxInt = int64(int16(c)) 4761 return true 4762 } 4763 return false 4764 } 4765 func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool { 4766 b := v.Block 4767 _ = b 4768 config := b.Func.Config 4769 _ = config 4770 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4771 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4772 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4773 for { 4774 off1 := v.AuxInt 4775 sym := v.Aux 4776 _ = v.Args[2] 4777 v_0 := v.Args[0] 4778 if v_0.Op != OpARM64ADDconst { 4779 break 4780 } 4781 off2 := v_0.AuxInt 4782 ptr := v_0.Args[0] 4783 val := v.Args[1] 4784 mem := v.Args[2] 4785 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || 
!config.ctxt.Flag_shared)) { 4786 break 4787 } 4788 v.reset(OpARM64MOVHstore) 4789 v.AuxInt = off1 + off2 4790 v.Aux = sym 4791 v.AddArg(ptr) 4792 v.AddArg(val) 4793 v.AddArg(mem) 4794 return true 4795 } 4796 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4797 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4798 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4799 for { 4800 off1 := v.AuxInt 4801 sym1 := v.Aux 4802 _ = v.Args[2] 4803 v_0 := v.Args[0] 4804 if v_0.Op != OpARM64MOVDaddr { 4805 break 4806 } 4807 off2 := v_0.AuxInt 4808 sym2 := v_0.Aux 4809 ptr := v_0.Args[0] 4810 val := v.Args[1] 4811 mem := v.Args[2] 4812 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4813 break 4814 } 4815 v.reset(OpARM64MOVHstore) 4816 v.AuxInt = off1 + off2 4817 v.Aux = mergeSym(sym1, sym2) 4818 v.AddArg(ptr) 4819 v.AddArg(val) 4820 v.AddArg(mem) 4821 return true 4822 } 4823 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 4824 // cond: 4825 // result: (MOVHstorezero [off] {sym} ptr mem) 4826 for { 4827 off := v.AuxInt 4828 sym := v.Aux 4829 _ = v.Args[2] 4830 ptr := v.Args[0] 4831 v_1 := v.Args[1] 4832 if v_1.Op != OpARM64MOVDconst { 4833 break 4834 } 4835 if v_1.AuxInt != 0 { 4836 break 4837 } 4838 mem := v.Args[2] 4839 v.reset(OpARM64MOVHstorezero) 4840 v.AuxInt = off 4841 v.Aux = sym 4842 v.AddArg(ptr) 4843 v.AddArg(mem) 4844 return true 4845 } 4846 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4847 // cond: 4848 // result: (MOVHstore [off] {sym} ptr x mem) 4849 for { 4850 off := v.AuxInt 4851 sym := v.Aux 4852 _ = v.Args[2] 4853 ptr := v.Args[0] 4854 v_1 := v.Args[1] 4855 if v_1.Op != OpARM64MOVHreg { 4856 break 4857 } 4858 x := v_1.Args[0] 4859 mem := v.Args[2] 4860 v.reset(OpARM64MOVHstore) 4861 v.AuxInt = off 4862 v.Aux = sym 4863 v.AddArg(ptr) 4864 v.AddArg(x) 4865 v.AddArg(mem) 4866 return true 4867 } 
4868 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4869 // cond: 4870 // result: (MOVHstore [off] {sym} ptr x mem) 4871 for { 4872 off := v.AuxInt 4873 sym := v.Aux 4874 _ = v.Args[2] 4875 ptr := v.Args[0] 4876 v_1 := v.Args[1] 4877 if v_1.Op != OpARM64MOVHUreg { 4878 break 4879 } 4880 x := v_1.Args[0] 4881 mem := v.Args[2] 4882 v.reset(OpARM64MOVHstore) 4883 v.AuxInt = off 4884 v.Aux = sym 4885 v.AddArg(ptr) 4886 v.AddArg(x) 4887 v.AddArg(mem) 4888 return true 4889 } 4890 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4891 // cond: 4892 // result: (MOVHstore [off] {sym} ptr x mem) 4893 for { 4894 off := v.AuxInt 4895 sym := v.Aux 4896 _ = v.Args[2] 4897 ptr := v.Args[0] 4898 v_1 := v.Args[1] 4899 if v_1.Op != OpARM64MOVWreg { 4900 break 4901 } 4902 x := v_1.Args[0] 4903 mem := v.Args[2] 4904 v.reset(OpARM64MOVHstore) 4905 v.AuxInt = off 4906 v.Aux = sym 4907 v.AddArg(ptr) 4908 v.AddArg(x) 4909 v.AddArg(mem) 4910 return true 4911 } 4912 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4913 // cond: 4914 // result: (MOVHstore [off] {sym} ptr x mem) 4915 for { 4916 off := v.AuxInt 4917 sym := v.Aux 4918 _ = v.Args[2] 4919 ptr := v.Args[0] 4920 v_1 := v.Args[1] 4921 if v_1.Op != OpARM64MOVWUreg { 4922 break 4923 } 4924 x := v_1.Args[0] 4925 mem := v.Args[2] 4926 v.reset(OpARM64MOVHstore) 4927 v.AuxInt = off 4928 v.Aux = sym 4929 v.AddArg(ptr) 4930 v.AddArg(x) 4931 v.AddArg(mem) 4932 return true 4933 } 4934 return false 4935 } 4936 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 4937 b := v.Block 4938 _ = b 4939 config := b.Func.Config 4940 _ = config 4941 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4942 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4943 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4944 for { 4945 off1 := v.AuxInt 4946 sym := v.Aux 4947 _ = v.Args[1] 4948 v_0 := v.Args[0] 4949 if v_0.Op != OpARM64ADDconst { 4950 break 4951 } 4952 off2 := v_0.AuxInt 4953 ptr 
:= v_0.Args[0] 4954 mem := v.Args[1] 4955 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4956 break 4957 } 4958 v.reset(OpARM64MOVHstorezero) 4959 v.AuxInt = off1 + off2 4960 v.Aux = sym 4961 v.AddArg(ptr) 4962 v.AddArg(mem) 4963 return true 4964 } 4965 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4966 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4967 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4968 for { 4969 off1 := v.AuxInt 4970 sym1 := v.Aux 4971 _ = v.Args[1] 4972 v_0 := v.Args[0] 4973 if v_0.Op != OpARM64MOVDaddr { 4974 break 4975 } 4976 off2 := v_0.AuxInt 4977 sym2 := v_0.Aux 4978 ptr := v_0.Args[0] 4979 mem := v.Args[1] 4980 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4981 break 4982 } 4983 v.reset(OpARM64MOVHstorezero) 4984 v.AuxInt = off1 + off2 4985 v.Aux = mergeSym(sym1, sym2) 4986 v.AddArg(ptr) 4987 v.AddArg(mem) 4988 return true 4989 } 4990 return false 4991 } 4992 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool { 4993 b := v.Block 4994 _ = b 4995 config := b.Func.Config 4996 _ = config 4997 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 4998 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4999 // result: (MOVQstorezero [off1+off2] {sym} ptr mem) 5000 for { 5001 off1 := v.AuxInt 5002 sym := v.Aux 5003 _ = v.Args[1] 5004 v_0 := v.Args[0] 5005 if v_0.Op != OpARM64ADDconst { 5006 break 5007 } 5008 off2 := v_0.AuxInt 5009 ptr := v_0.Args[0] 5010 mem := v.Args[1] 5011 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5012 break 5013 } 5014 v.reset(OpARM64MOVQstorezero) 5015 v.AuxInt = off1 + off2 5016 v.Aux = sym 5017 v.AddArg(ptr) 5018 v.AddArg(mem) 5019 return true 5020 } 5021 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5022 // cond: 
canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5023 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5024 for { 5025 off1 := v.AuxInt 5026 sym1 := v.Aux 5027 _ = v.Args[1] 5028 v_0 := v.Args[0] 5029 if v_0.Op != OpARM64MOVDaddr { 5030 break 5031 } 5032 off2 := v_0.AuxInt 5033 sym2 := v_0.Aux 5034 ptr := v_0.Args[0] 5035 mem := v.Args[1] 5036 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5037 break 5038 } 5039 v.reset(OpARM64MOVQstorezero) 5040 v.AuxInt = off1 + off2 5041 v.Aux = mergeSym(sym1, sym2) 5042 v.AddArg(ptr) 5043 v.AddArg(mem) 5044 return true 5045 } 5046 return false 5047 } 5048 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 5049 b := v.Block 5050 _ = b 5051 config := b.Func.Config 5052 _ = config 5053 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 5054 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5055 // result: (MOVWUload [off1+off2] {sym} ptr mem) 5056 for { 5057 off1 := v.AuxInt 5058 sym := v.Aux 5059 _ = v.Args[1] 5060 v_0 := v.Args[0] 5061 if v_0.Op != OpARM64ADDconst { 5062 break 5063 } 5064 off2 := v_0.AuxInt 5065 ptr := v_0.Args[0] 5066 mem := v.Args[1] 5067 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5068 break 5069 } 5070 v.reset(OpARM64MOVWUload) 5071 v.AuxInt = off1 + off2 5072 v.Aux = sym 5073 v.AddArg(ptr) 5074 v.AddArg(mem) 5075 return true 5076 } 5077 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5078 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5079 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5080 for { 5081 off1 := v.AuxInt 5082 sym1 := v.Aux 5083 _ = v.Args[1] 5084 v_0 := v.Args[0] 5085 if v_0.Op != OpARM64MOVDaddr { 5086 break 5087 } 5088 off2 := v_0.AuxInt 5089 sym2 := v_0.Aux 5090 ptr := v_0.Args[0] 5091 mem := v.Args[1] 5092 
if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5093 break 5094 } 5095 v.reset(OpARM64MOVWUload) 5096 v.AuxInt = off1 + off2 5097 v.Aux = mergeSym(sym1, sym2) 5098 v.AddArg(ptr) 5099 v.AddArg(mem) 5100 return true 5101 } 5102 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 5103 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 5104 // result: (MOVDconst [0]) 5105 for { 5106 off := v.AuxInt 5107 sym := v.Aux 5108 _ = v.Args[1] 5109 ptr := v.Args[0] 5110 v_1 := v.Args[1] 5111 if v_1.Op != OpARM64MOVWstorezero { 5112 break 5113 } 5114 off2 := v_1.AuxInt 5115 sym2 := v_1.Aux 5116 _ = v_1.Args[1] 5117 ptr2 := v_1.Args[0] 5118 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 5119 break 5120 } 5121 v.reset(OpARM64MOVDconst) 5122 v.AuxInt = 0 5123 return true 5124 } 5125 return false 5126 } 5127 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 5128 // match: (MOVWUreg x:(MOVBUload _ _)) 5129 // cond: 5130 // result: (MOVDreg x) 5131 for { 5132 x := v.Args[0] 5133 if x.Op != OpARM64MOVBUload { 5134 break 5135 } 5136 _ = x.Args[1] 5137 v.reset(OpARM64MOVDreg) 5138 v.AddArg(x) 5139 return true 5140 } 5141 // match: (MOVWUreg x:(MOVHUload _ _)) 5142 // cond: 5143 // result: (MOVDreg x) 5144 for { 5145 x := v.Args[0] 5146 if x.Op != OpARM64MOVHUload { 5147 break 5148 } 5149 _ = x.Args[1] 5150 v.reset(OpARM64MOVDreg) 5151 v.AddArg(x) 5152 return true 5153 } 5154 // match: (MOVWUreg x:(MOVWUload _ _)) 5155 // cond: 5156 // result: (MOVDreg x) 5157 for { 5158 x := v.Args[0] 5159 if x.Op != OpARM64MOVWUload { 5160 break 5161 } 5162 _ = x.Args[1] 5163 v.reset(OpARM64MOVDreg) 5164 v.AddArg(x) 5165 return true 5166 } 5167 // match: (MOVWUreg x:(MOVBUreg _)) 5168 // cond: 5169 // result: (MOVDreg x) 5170 for { 5171 x := v.Args[0] 5172 if x.Op != OpARM64MOVBUreg { 5173 break 5174 } 5175 v.reset(OpARM64MOVDreg) 5176 v.AddArg(x) 5177 return true 5178 } 5179 // match: (MOVWUreg 
x:(MOVHUreg _)) 5180 // cond: 5181 // result: (MOVDreg x) 5182 for { 5183 x := v.Args[0] 5184 if x.Op != OpARM64MOVHUreg { 5185 break 5186 } 5187 v.reset(OpARM64MOVDreg) 5188 v.AddArg(x) 5189 return true 5190 } 5191 // match: (MOVWUreg x:(MOVWUreg _)) 5192 // cond: 5193 // result: (MOVDreg x) 5194 for { 5195 x := v.Args[0] 5196 if x.Op != OpARM64MOVWUreg { 5197 break 5198 } 5199 v.reset(OpARM64MOVDreg) 5200 v.AddArg(x) 5201 return true 5202 } 5203 // match: (MOVWUreg (MOVDconst [c])) 5204 // cond: 5205 // result: (MOVDconst [int64(uint32(c))]) 5206 for { 5207 v_0 := v.Args[0] 5208 if v_0.Op != OpARM64MOVDconst { 5209 break 5210 } 5211 c := v_0.AuxInt 5212 v.reset(OpARM64MOVDconst) 5213 v.AuxInt = int64(uint32(c)) 5214 return true 5215 } 5216 return false 5217 } 5218 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 5219 b := v.Block 5220 _ = b 5221 config := b.Func.Config 5222 _ = config 5223 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 5224 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5225 // result: (MOVWload [off1+off2] {sym} ptr mem) 5226 for { 5227 off1 := v.AuxInt 5228 sym := v.Aux 5229 _ = v.Args[1] 5230 v_0 := v.Args[0] 5231 if v_0.Op != OpARM64ADDconst { 5232 break 5233 } 5234 off2 := v_0.AuxInt 5235 ptr := v_0.Args[0] 5236 mem := v.Args[1] 5237 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5238 break 5239 } 5240 v.reset(OpARM64MOVWload) 5241 v.AuxInt = off1 + off2 5242 v.Aux = sym 5243 v.AddArg(ptr) 5244 v.AddArg(mem) 5245 return true 5246 } 5247 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5248 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5249 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5250 for { 5251 off1 := v.AuxInt 5252 sym1 := v.Aux 5253 _ = v.Args[1] 5254 v_0 := v.Args[0] 5255 if v_0.Op != OpARM64MOVDaddr { 5256 break 5257 } 5258 off2 := v_0.AuxInt 5259 sym2 := v_0.Aux 
5260 ptr := v_0.Args[0] 5261 mem := v.Args[1] 5262 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5263 break 5264 } 5265 v.reset(OpARM64MOVWload) 5266 v.AuxInt = off1 + off2 5267 v.Aux = mergeSym(sym1, sym2) 5268 v.AddArg(ptr) 5269 v.AddArg(mem) 5270 return true 5271 } 5272 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 5273 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 5274 // result: (MOVDconst [0]) 5275 for { 5276 off := v.AuxInt 5277 sym := v.Aux 5278 _ = v.Args[1] 5279 ptr := v.Args[0] 5280 v_1 := v.Args[1] 5281 if v_1.Op != OpARM64MOVWstorezero { 5282 break 5283 } 5284 off2 := v_1.AuxInt 5285 sym2 := v_1.Aux 5286 _ = v_1.Args[1] 5287 ptr2 := v_1.Args[0] 5288 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 5289 break 5290 } 5291 v.reset(OpARM64MOVDconst) 5292 v.AuxInt = 0 5293 return true 5294 } 5295 return false 5296 } 5297 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool { 5298 // match: (MOVWreg x:(MOVBload _ _)) 5299 // cond: 5300 // result: (MOVDreg x) 5301 for { 5302 x := v.Args[0] 5303 if x.Op != OpARM64MOVBload { 5304 break 5305 } 5306 _ = x.Args[1] 5307 v.reset(OpARM64MOVDreg) 5308 v.AddArg(x) 5309 return true 5310 } 5311 // match: (MOVWreg x:(MOVBUload _ _)) 5312 // cond: 5313 // result: (MOVDreg x) 5314 for { 5315 x := v.Args[0] 5316 if x.Op != OpARM64MOVBUload { 5317 break 5318 } 5319 _ = x.Args[1] 5320 v.reset(OpARM64MOVDreg) 5321 v.AddArg(x) 5322 return true 5323 } 5324 // match: (MOVWreg x:(MOVHload _ _)) 5325 // cond: 5326 // result: (MOVDreg x) 5327 for { 5328 x := v.Args[0] 5329 if x.Op != OpARM64MOVHload { 5330 break 5331 } 5332 _ = x.Args[1] 5333 v.reset(OpARM64MOVDreg) 5334 v.AddArg(x) 5335 return true 5336 } 5337 // match: (MOVWreg x:(MOVHUload _ _)) 5338 // cond: 5339 // result: (MOVDreg x) 5340 for { 5341 x := v.Args[0] 5342 if x.Op != OpARM64MOVHUload { 5343 break 5344 } 5345 _ = x.Args[1] 5346 v.reset(OpARM64MOVDreg) 5347 
v.AddArg(x) 5348 return true 5349 } 5350 // match: (MOVWreg x:(MOVWload _ _)) 5351 // cond: 5352 // result: (MOVDreg x) 5353 for { 5354 x := v.Args[0] 5355 if x.Op != OpARM64MOVWload { 5356 break 5357 } 5358 _ = x.Args[1] 5359 v.reset(OpARM64MOVDreg) 5360 v.AddArg(x) 5361 return true 5362 } 5363 // match: (MOVWreg x:(MOVBreg _)) 5364 // cond: 5365 // result: (MOVDreg x) 5366 for { 5367 x := v.Args[0] 5368 if x.Op != OpARM64MOVBreg { 5369 break 5370 } 5371 v.reset(OpARM64MOVDreg) 5372 v.AddArg(x) 5373 return true 5374 } 5375 // match: (MOVWreg x:(MOVBUreg _)) 5376 // cond: 5377 // result: (MOVDreg x) 5378 for { 5379 x := v.Args[0] 5380 if x.Op != OpARM64MOVBUreg { 5381 break 5382 } 5383 v.reset(OpARM64MOVDreg) 5384 v.AddArg(x) 5385 return true 5386 } 5387 // match: (MOVWreg x:(MOVHreg _)) 5388 // cond: 5389 // result: (MOVDreg x) 5390 for { 5391 x := v.Args[0] 5392 if x.Op != OpARM64MOVHreg { 5393 break 5394 } 5395 v.reset(OpARM64MOVDreg) 5396 v.AddArg(x) 5397 return true 5398 } 5399 // match: (MOVWreg x:(MOVHreg _)) 5400 // cond: 5401 // result: (MOVDreg x) 5402 for { 5403 x := v.Args[0] 5404 if x.Op != OpARM64MOVHreg { 5405 break 5406 } 5407 v.reset(OpARM64MOVDreg) 5408 v.AddArg(x) 5409 return true 5410 } 5411 // match: (MOVWreg x:(MOVWreg _)) 5412 // cond: 5413 // result: (MOVDreg x) 5414 for { 5415 x := v.Args[0] 5416 if x.Op != OpARM64MOVWreg { 5417 break 5418 } 5419 v.reset(OpARM64MOVDreg) 5420 v.AddArg(x) 5421 return true 5422 } 5423 return false 5424 } 5425 func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool { 5426 // match: (MOVWreg (MOVDconst [c])) 5427 // cond: 5428 // result: (MOVDconst [int64(int32(c))]) 5429 for { 5430 v_0 := v.Args[0] 5431 if v_0.Op != OpARM64MOVDconst { 5432 break 5433 } 5434 c := v_0.AuxInt 5435 v.reset(OpARM64MOVDconst) 5436 v.AuxInt = int64(int32(c)) 5437 return true 5438 } 5439 return false 5440 } 5441 func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool { 5442 b := v.Block 5443 _ = b 5444 config := b.Func.Config 5445 _ = 
config 5446 // match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5447 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5448 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 5449 for { 5450 off1 := v.AuxInt 5451 sym := v.Aux 5452 _ = v.Args[2] 5453 v_0 := v.Args[0] 5454 if v_0.Op != OpARM64ADDconst { 5455 break 5456 } 5457 off2 := v_0.AuxInt 5458 ptr := v_0.Args[0] 5459 val := v.Args[1] 5460 mem := v.Args[2] 5461 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5462 break 5463 } 5464 v.reset(OpARM64MOVWstore) 5465 v.AuxInt = off1 + off2 5466 v.Aux = sym 5467 v.AddArg(ptr) 5468 v.AddArg(val) 5469 v.AddArg(mem) 5470 return true 5471 } 5472 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5473 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5474 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5475 for { 5476 off1 := v.AuxInt 5477 sym1 := v.Aux 5478 _ = v.Args[2] 5479 v_0 := v.Args[0] 5480 if v_0.Op != OpARM64MOVDaddr { 5481 break 5482 } 5483 off2 := v_0.AuxInt 5484 sym2 := v_0.Aux 5485 ptr := v_0.Args[0] 5486 val := v.Args[1] 5487 mem := v.Args[2] 5488 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5489 break 5490 } 5491 v.reset(OpARM64MOVWstore) 5492 v.AuxInt = off1 + off2 5493 v.Aux = mergeSym(sym1, sym2) 5494 v.AddArg(ptr) 5495 v.AddArg(val) 5496 v.AddArg(mem) 5497 return true 5498 } 5499 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 5500 // cond: 5501 // result: (MOVWstorezero [off] {sym} ptr mem) 5502 for { 5503 off := v.AuxInt 5504 sym := v.Aux 5505 _ = v.Args[2] 5506 ptr := v.Args[0] 5507 v_1 := v.Args[1] 5508 if v_1.Op != OpARM64MOVDconst { 5509 break 5510 } 5511 if v_1.AuxInt != 0 { 5512 break 5513 } 5514 mem := v.Args[2] 5515 v.reset(OpARM64MOVWstorezero) 5516 v.AuxInt = off 5517 v.Aux = sym 5518 v.AddArg(ptr) 5519 v.AddArg(mem) 
5520 return true 5521 } 5522 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5523 // cond: 5524 // result: (MOVWstore [off] {sym} ptr x mem) 5525 for { 5526 off := v.AuxInt 5527 sym := v.Aux 5528 _ = v.Args[2] 5529 ptr := v.Args[0] 5530 v_1 := v.Args[1] 5531 if v_1.Op != OpARM64MOVWreg { 5532 break 5533 } 5534 x := v_1.Args[0] 5535 mem := v.Args[2] 5536 v.reset(OpARM64MOVWstore) 5537 v.AuxInt = off 5538 v.Aux = sym 5539 v.AddArg(ptr) 5540 v.AddArg(x) 5541 v.AddArg(mem) 5542 return true 5543 } 5544 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 5545 // cond: 5546 // result: (MOVWstore [off] {sym} ptr x mem) 5547 for { 5548 off := v.AuxInt 5549 sym := v.Aux 5550 _ = v.Args[2] 5551 ptr := v.Args[0] 5552 v_1 := v.Args[1] 5553 if v_1.Op != OpARM64MOVWUreg { 5554 break 5555 } 5556 x := v_1.Args[0] 5557 mem := v.Args[2] 5558 v.reset(OpARM64MOVWstore) 5559 v.AuxInt = off 5560 v.Aux = sym 5561 v.AddArg(ptr) 5562 v.AddArg(x) 5563 v.AddArg(mem) 5564 return true 5565 } 5566 return false 5567 } 5568 func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool { 5569 b := v.Block 5570 _ = b 5571 config := b.Func.Config 5572 _ = config 5573 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 5574 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5575 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5576 for { 5577 off1 := v.AuxInt 5578 sym := v.Aux 5579 _ = v.Args[1] 5580 v_0 := v.Args[0] 5581 if v_0.Op != OpARM64ADDconst { 5582 break 5583 } 5584 off2 := v_0.AuxInt 5585 ptr := v_0.Args[0] 5586 mem := v.Args[1] 5587 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5588 break 5589 } 5590 v.reset(OpARM64MOVWstorezero) 5591 v.AuxInt = off1 + off2 5592 v.Aux = sym 5593 v.AddArg(ptr) 5594 v.AddArg(mem) 5595 return true 5596 } 5597 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5598 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || 
!config.ctxt.Flag_shared) 5599 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5600 for { 5601 off1 := v.AuxInt 5602 sym1 := v.Aux 5603 _ = v.Args[1] 5604 v_0 := v.Args[0] 5605 if v_0.Op != OpARM64MOVDaddr { 5606 break 5607 } 5608 off2 := v_0.AuxInt 5609 sym2 := v_0.Aux 5610 ptr := v_0.Args[0] 5611 mem := v.Args[1] 5612 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5613 break 5614 } 5615 v.reset(OpARM64MOVWstorezero) 5616 v.AuxInt = off1 + off2 5617 v.Aux = mergeSym(sym1, sym2) 5618 v.AddArg(ptr) 5619 v.AddArg(mem) 5620 return true 5621 } 5622 return false 5623 } 5624 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool { 5625 // match: (MUL x (MOVDconst [-1])) 5626 // cond: 5627 // result: (NEG x) 5628 for { 5629 _ = v.Args[1] 5630 x := v.Args[0] 5631 v_1 := v.Args[1] 5632 if v_1.Op != OpARM64MOVDconst { 5633 break 5634 } 5635 if v_1.AuxInt != -1 { 5636 break 5637 } 5638 v.reset(OpARM64NEG) 5639 v.AddArg(x) 5640 return true 5641 } 5642 // match: (MUL (MOVDconst [-1]) x) 5643 // cond: 5644 // result: (NEG x) 5645 for { 5646 _ = v.Args[1] 5647 v_0 := v.Args[0] 5648 if v_0.Op != OpARM64MOVDconst { 5649 break 5650 } 5651 if v_0.AuxInt != -1 { 5652 break 5653 } 5654 x := v.Args[1] 5655 v.reset(OpARM64NEG) 5656 v.AddArg(x) 5657 return true 5658 } 5659 // match: (MUL _ (MOVDconst [0])) 5660 // cond: 5661 // result: (MOVDconst [0]) 5662 for { 5663 _ = v.Args[1] 5664 v_1 := v.Args[1] 5665 if v_1.Op != OpARM64MOVDconst { 5666 break 5667 } 5668 if v_1.AuxInt != 0 { 5669 break 5670 } 5671 v.reset(OpARM64MOVDconst) 5672 v.AuxInt = 0 5673 return true 5674 } 5675 // match: (MUL (MOVDconst [0]) _) 5676 // cond: 5677 // result: (MOVDconst [0]) 5678 for { 5679 _ = v.Args[1] 5680 v_0 := v.Args[0] 5681 if v_0.Op != OpARM64MOVDconst { 5682 break 5683 } 5684 if v_0.AuxInt != 0 { 5685 break 5686 } 5687 v.reset(OpARM64MOVDconst) 5688 v.AuxInt = 0 5689 return true 5690 } 5691 // match: (MUL x (MOVDconst [1])) 5692 
// cond: 5693 // result: x 5694 for { 5695 _ = v.Args[1] 5696 x := v.Args[0] 5697 v_1 := v.Args[1] 5698 if v_1.Op != OpARM64MOVDconst { 5699 break 5700 } 5701 if v_1.AuxInt != 1 { 5702 break 5703 } 5704 v.reset(OpCopy) 5705 v.Type = x.Type 5706 v.AddArg(x) 5707 return true 5708 } 5709 // match: (MUL (MOVDconst [1]) x) 5710 // cond: 5711 // result: x 5712 for { 5713 _ = v.Args[1] 5714 v_0 := v.Args[0] 5715 if v_0.Op != OpARM64MOVDconst { 5716 break 5717 } 5718 if v_0.AuxInt != 1 { 5719 break 5720 } 5721 x := v.Args[1] 5722 v.reset(OpCopy) 5723 v.Type = x.Type 5724 v.AddArg(x) 5725 return true 5726 } 5727 // match: (MUL x (MOVDconst [c])) 5728 // cond: isPowerOfTwo(c) 5729 // result: (SLLconst [log2(c)] x) 5730 for { 5731 _ = v.Args[1] 5732 x := v.Args[0] 5733 v_1 := v.Args[1] 5734 if v_1.Op != OpARM64MOVDconst { 5735 break 5736 } 5737 c := v_1.AuxInt 5738 if !(isPowerOfTwo(c)) { 5739 break 5740 } 5741 v.reset(OpARM64SLLconst) 5742 v.AuxInt = log2(c) 5743 v.AddArg(x) 5744 return true 5745 } 5746 // match: (MUL (MOVDconst [c]) x) 5747 // cond: isPowerOfTwo(c) 5748 // result: (SLLconst [log2(c)] x) 5749 for { 5750 _ = v.Args[1] 5751 v_0 := v.Args[0] 5752 if v_0.Op != OpARM64MOVDconst { 5753 break 5754 } 5755 c := v_0.AuxInt 5756 x := v.Args[1] 5757 if !(isPowerOfTwo(c)) { 5758 break 5759 } 5760 v.reset(OpARM64SLLconst) 5761 v.AuxInt = log2(c) 5762 v.AddArg(x) 5763 return true 5764 } 5765 // match: (MUL x (MOVDconst [c])) 5766 // cond: isPowerOfTwo(c-1) && c >= 3 5767 // result: (ADDshiftLL x x [log2(c-1)]) 5768 for { 5769 _ = v.Args[1] 5770 x := v.Args[0] 5771 v_1 := v.Args[1] 5772 if v_1.Op != OpARM64MOVDconst { 5773 break 5774 } 5775 c := v_1.AuxInt 5776 if !(isPowerOfTwo(c-1) && c >= 3) { 5777 break 5778 } 5779 v.reset(OpARM64ADDshiftLL) 5780 v.AuxInt = log2(c - 1) 5781 v.AddArg(x) 5782 v.AddArg(x) 5783 return true 5784 } 5785 // match: (MUL (MOVDconst [c]) x) 5786 // cond: isPowerOfTwo(c-1) && c >= 3 5787 // result: (ADDshiftLL x x [log2(c-1)]) 5788 for { 5789 _ = 
v.Args[1] 5790 v_0 := v.Args[0] 5791 if v_0.Op != OpARM64MOVDconst { 5792 break 5793 } 5794 c := v_0.AuxInt 5795 x := v.Args[1] 5796 if !(isPowerOfTwo(c-1) && c >= 3) { 5797 break 5798 } 5799 v.reset(OpARM64ADDshiftLL) 5800 v.AuxInt = log2(c - 1) 5801 v.AddArg(x) 5802 v.AddArg(x) 5803 return true 5804 } 5805 return false 5806 } 5807 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 5808 b := v.Block 5809 _ = b 5810 // match: (MUL x (MOVDconst [c])) 5811 // cond: isPowerOfTwo(c+1) && c >= 7 5812 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5813 for { 5814 _ = v.Args[1] 5815 x := v.Args[0] 5816 v_1 := v.Args[1] 5817 if v_1.Op != OpARM64MOVDconst { 5818 break 5819 } 5820 c := v_1.AuxInt 5821 if !(isPowerOfTwo(c+1) && c >= 7) { 5822 break 5823 } 5824 v.reset(OpARM64ADDshiftLL) 5825 v.AuxInt = log2(c + 1) 5826 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5827 v0.AddArg(x) 5828 v.AddArg(v0) 5829 v.AddArg(x) 5830 return true 5831 } 5832 // match: (MUL (MOVDconst [c]) x) 5833 // cond: isPowerOfTwo(c+1) && c >= 7 5834 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5835 for { 5836 _ = v.Args[1] 5837 v_0 := v.Args[0] 5838 if v_0.Op != OpARM64MOVDconst { 5839 break 5840 } 5841 c := v_0.AuxInt 5842 x := v.Args[1] 5843 if !(isPowerOfTwo(c+1) && c >= 7) { 5844 break 5845 } 5846 v.reset(OpARM64ADDshiftLL) 5847 v.AuxInt = log2(c + 1) 5848 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5849 v0.AddArg(x) 5850 v.AddArg(v0) 5851 v.AddArg(x) 5852 return true 5853 } 5854 // match: (MUL x (MOVDconst [c])) 5855 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5856 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5857 for { 5858 _ = v.Args[1] 5859 x := v.Args[0] 5860 v_1 := v.Args[1] 5861 if v_1.Op != OpARM64MOVDconst { 5862 break 5863 } 5864 c := v_1.AuxInt 5865 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5866 break 5867 } 5868 v.reset(OpARM64SLLconst) 5869 v.AuxInt = log2(c / 3) 5870 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5871 v0.AuxInt = 1 5872 
v0.AddArg(x) 5873 v0.AddArg(x) 5874 v.AddArg(v0) 5875 return true 5876 } 5877 // match: (MUL (MOVDconst [c]) x) 5878 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5879 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5880 for { 5881 _ = v.Args[1] 5882 v_0 := v.Args[0] 5883 if v_0.Op != OpARM64MOVDconst { 5884 break 5885 } 5886 c := v_0.AuxInt 5887 x := v.Args[1] 5888 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5889 break 5890 } 5891 v.reset(OpARM64SLLconst) 5892 v.AuxInt = log2(c / 3) 5893 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5894 v0.AuxInt = 1 5895 v0.AddArg(x) 5896 v0.AddArg(x) 5897 v.AddArg(v0) 5898 return true 5899 } 5900 // match: (MUL x (MOVDconst [c])) 5901 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5902 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5903 for { 5904 _ = v.Args[1] 5905 x := v.Args[0] 5906 v_1 := v.Args[1] 5907 if v_1.Op != OpARM64MOVDconst { 5908 break 5909 } 5910 c := v_1.AuxInt 5911 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5912 break 5913 } 5914 v.reset(OpARM64SLLconst) 5915 v.AuxInt = log2(c / 5) 5916 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5917 v0.AuxInt = 2 5918 v0.AddArg(x) 5919 v0.AddArg(x) 5920 v.AddArg(v0) 5921 return true 5922 } 5923 // match: (MUL (MOVDconst [c]) x) 5924 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5925 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5926 for { 5927 _ = v.Args[1] 5928 v_0 := v.Args[0] 5929 if v_0.Op != OpARM64MOVDconst { 5930 break 5931 } 5932 c := v_0.AuxInt 5933 x := v.Args[1] 5934 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5935 break 5936 } 5937 v.reset(OpARM64SLLconst) 5938 v.AuxInt = log2(c / 5) 5939 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5940 v0.AuxInt = 2 5941 v0.AddArg(x) 5942 v0.AddArg(x) 5943 v.AddArg(v0) 5944 return true 5945 } 5946 // match: (MUL x (MOVDconst [c])) 5947 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5948 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5949 for { 5950 _ = v.Args[1] 5951 x := 
v.Args[0] 5952 v_1 := v.Args[1] 5953 if v_1.Op != OpARM64MOVDconst { 5954 break 5955 } 5956 c := v_1.AuxInt 5957 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5958 break 5959 } 5960 v.reset(OpARM64SLLconst) 5961 v.AuxInt = log2(c / 7) 5962 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5963 v0.AuxInt = 3 5964 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5965 v1.AddArg(x) 5966 v0.AddArg(v1) 5967 v0.AddArg(x) 5968 v.AddArg(v0) 5969 return true 5970 } 5971 // match: (MUL (MOVDconst [c]) x) 5972 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5973 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5974 for { 5975 _ = v.Args[1] 5976 v_0 := v.Args[0] 5977 if v_0.Op != OpARM64MOVDconst { 5978 break 5979 } 5980 c := v_0.AuxInt 5981 x := v.Args[1] 5982 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5983 break 5984 } 5985 v.reset(OpARM64SLLconst) 5986 v.AuxInt = log2(c / 7) 5987 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5988 v0.AuxInt = 3 5989 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5990 v1.AddArg(x) 5991 v0.AddArg(v1) 5992 v0.AddArg(x) 5993 v.AddArg(v0) 5994 return true 5995 } 5996 // match: (MUL x (MOVDconst [c])) 5997 // cond: c%9 == 0 && isPowerOfTwo(c/9) 5998 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 5999 for { 6000 _ = v.Args[1] 6001 x := v.Args[0] 6002 v_1 := v.Args[1] 6003 if v_1.Op != OpARM64MOVDconst { 6004 break 6005 } 6006 c := v_1.AuxInt 6007 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 6008 break 6009 } 6010 v.reset(OpARM64SLLconst) 6011 v.AuxInt = log2(c / 9) 6012 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6013 v0.AuxInt = 3 6014 v0.AddArg(x) 6015 v0.AddArg(x) 6016 v.AddArg(v0) 6017 return true 6018 } 6019 // match: (MUL (MOVDconst [c]) x) 6020 // cond: c%9 == 0 && isPowerOfTwo(c/9) 6021 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6022 for { 6023 _ = v.Args[1] 6024 v_0 := v.Args[0] 6025 if v_0.Op != OpARM64MOVDconst { 6026 break 6027 } 6028 c := v_0.AuxInt 6029 x := v.Args[1] 6030 if !(c%9 == 0 
&& isPowerOfTwo(c/9)) { 6031 break 6032 } 6033 v.reset(OpARM64SLLconst) 6034 v.AuxInt = log2(c / 9) 6035 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6036 v0.AuxInt = 3 6037 v0.AddArg(x) 6038 v0.AddArg(x) 6039 v.AddArg(v0) 6040 return true 6041 } 6042 return false 6043 } 6044 func rewriteValueARM64_OpARM64MUL_20(v *Value) bool { 6045 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 6046 // cond: 6047 // result: (MOVDconst [c*d]) 6048 for { 6049 _ = v.Args[1] 6050 v_0 := v.Args[0] 6051 if v_0.Op != OpARM64MOVDconst { 6052 break 6053 } 6054 c := v_0.AuxInt 6055 v_1 := v.Args[1] 6056 if v_1.Op != OpARM64MOVDconst { 6057 break 6058 } 6059 d := v_1.AuxInt 6060 v.reset(OpARM64MOVDconst) 6061 v.AuxInt = c * d 6062 return true 6063 } 6064 // match: (MUL (MOVDconst [d]) (MOVDconst [c])) 6065 // cond: 6066 // result: (MOVDconst [c*d]) 6067 for { 6068 _ = v.Args[1] 6069 v_0 := v.Args[0] 6070 if v_0.Op != OpARM64MOVDconst { 6071 break 6072 } 6073 d := v_0.AuxInt 6074 v_1 := v.Args[1] 6075 if v_1.Op != OpARM64MOVDconst { 6076 break 6077 } 6078 c := v_1.AuxInt 6079 v.reset(OpARM64MOVDconst) 6080 v.AuxInt = c * d 6081 return true 6082 } 6083 return false 6084 } 6085 func rewriteValueARM64_OpARM64MULW_0(v *Value) bool { 6086 // match: (MULW x (MOVDconst [c])) 6087 // cond: int32(c)==-1 6088 // result: (NEG x) 6089 for { 6090 _ = v.Args[1] 6091 x := v.Args[0] 6092 v_1 := v.Args[1] 6093 if v_1.Op != OpARM64MOVDconst { 6094 break 6095 } 6096 c := v_1.AuxInt 6097 if !(int32(c) == -1) { 6098 break 6099 } 6100 v.reset(OpARM64NEG) 6101 v.AddArg(x) 6102 return true 6103 } 6104 // match: (MULW (MOVDconst [c]) x) 6105 // cond: int32(c)==-1 6106 // result: (NEG x) 6107 for { 6108 _ = v.Args[1] 6109 v_0 := v.Args[0] 6110 if v_0.Op != OpARM64MOVDconst { 6111 break 6112 } 6113 c := v_0.AuxInt 6114 x := v.Args[1] 6115 if !(int32(c) == -1) { 6116 break 6117 } 6118 v.reset(OpARM64NEG) 6119 v.AddArg(x) 6120 return true 6121 } 6122 // match: (MULW _ (MOVDconst [c])) 6123 // cond: int32(c)==0 
6124 // result: (MOVDconst [0]) 6125 for { 6126 _ = v.Args[1] 6127 v_1 := v.Args[1] 6128 if v_1.Op != OpARM64MOVDconst { 6129 break 6130 } 6131 c := v_1.AuxInt 6132 if !(int32(c) == 0) { 6133 break 6134 } 6135 v.reset(OpARM64MOVDconst) 6136 v.AuxInt = 0 6137 return true 6138 } 6139 // match: (MULW (MOVDconst [c]) _) 6140 // cond: int32(c)==0 6141 // result: (MOVDconst [0]) 6142 for { 6143 _ = v.Args[1] 6144 v_0 := v.Args[0] 6145 if v_0.Op != OpARM64MOVDconst { 6146 break 6147 } 6148 c := v_0.AuxInt 6149 if !(int32(c) == 0) { 6150 break 6151 } 6152 v.reset(OpARM64MOVDconst) 6153 v.AuxInt = 0 6154 return true 6155 } 6156 // match: (MULW x (MOVDconst [c])) 6157 // cond: int32(c)==1 6158 // result: x 6159 for { 6160 _ = v.Args[1] 6161 x := v.Args[0] 6162 v_1 := v.Args[1] 6163 if v_1.Op != OpARM64MOVDconst { 6164 break 6165 } 6166 c := v_1.AuxInt 6167 if !(int32(c) == 1) { 6168 break 6169 } 6170 v.reset(OpCopy) 6171 v.Type = x.Type 6172 v.AddArg(x) 6173 return true 6174 } 6175 // match: (MULW (MOVDconst [c]) x) 6176 // cond: int32(c)==1 6177 // result: x 6178 for { 6179 _ = v.Args[1] 6180 v_0 := v.Args[0] 6181 if v_0.Op != OpARM64MOVDconst { 6182 break 6183 } 6184 c := v_0.AuxInt 6185 x := v.Args[1] 6186 if !(int32(c) == 1) { 6187 break 6188 } 6189 v.reset(OpCopy) 6190 v.Type = x.Type 6191 v.AddArg(x) 6192 return true 6193 } 6194 // match: (MULW x (MOVDconst [c])) 6195 // cond: isPowerOfTwo(c) 6196 // result: (SLLconst [log2(c)] x) 6197 for { 6198 _ = v.Args[1] 6199 x := v.Args[0] 6200 v_1 := v.Args[1] 6201 if v_1.Op != OpARM64MOVDconst { 6202 break 6203 } 6204 c := v_1.AuxInt 6205 if !(isPowerOfTwo(c)) { 6206 break 6207 } 6208 v.reset(OpARM64SLLconst) 6209 v.AuxInt = log2(c) 6210 v.AddArg(x) 6211 return true 6212 } 6213 // match: (MULW (MOVDconst [c]) x) 6214 // cond: isPowerOfTwo(c) 6215 // result: (SLLconst [log2(c)] x) 6216 for { 6217 _ = v.Args[1] 6218 v_0 := v.Args[0] 6219 if v_0.Op != OpARM64MOVDconst { 6220 break 6221 } 6222 c := v_0.AuxInt 6223 x := v.Args[1] 
6224 if !(isPowerOfTwo(c)) { 6225 break 6226 } 6227 v.reset(OpARM64SLLconst) 6228 v.AuxInt = log2(c) 6229 v.AddArg(x) 6230 return true 6231 } 6232 // match: (MULW x (MOVDconst [c])) 6233 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 6234 // result: (ADDshiftLL x x [log2(c-1)]) 6235 for { 6236 _ = v.Args[1] 6237 x := v.Args[0] 6238 v_1 := v.Args[1] 6239 if v_1.Op != OpARM64MOVDconst { 6240 break 6241 } 6242 c := v_1.AuxInt 6243 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 6244 break 6245 } 6246 v.reset(OpARM64ADDshiftLL) 6247 v.AuxInt = log2(c - 1) 6248 v.AddArg(x) 6249 v.AddArg(x) 6250 return true 6251 } 6252 // match: (MULW (MOVDconst [c]) x) 6253 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 6254 // result: (ADDshiftLL x x [log2(c-1)]) 6255 for { 6256 _ = v.Args[1] 6257 v_0 := v.Args[0] 6258 if v_0.Op != OpARM64MOVDconst { 6259 break 6260 } 6261 c := v_0.AuxInt 6262 x := v.Args[1] 6263 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 6264 break 6265 } 6266 v.reset(OpARM64ADDshiftLL) 6267 v.AuxInt = log2(c - 1) 6268 v.AddArg(x) 6269 v.AddArg(x) 6270 return true 6271 } 6272 return false 6273 } 6274 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool { 6275 b := v.Block 6276 _ = b 6277 // match: (MULW x (MOVDconst [c])) 6278 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 6279 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 6280 for { 6281 _ = v.Args[1] 6282 x := v.Args[0] 6283 v_1 := v.Args[1] 6284 if v_1.Op != OpARM64MOVDconst { 6285 break 6286 } 6287 c := v_1.AuxInt 6288 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 6289 break 6290 } 6291 v.reset(OpARM64ADDshiftLL) 6292 v.AuxInt = log2(c + 1) 6293 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6294 v0.AddArg(x) 6295 v.AddArg(v0) 6296 v.AddArg(x) 6297 return true 6298 } 6299 // match: (MULW (MOVDconst [c]) x) 6300 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 6301 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 6302 for { 6303 _ = v.Args[1] 6304 v_0 := v.Args[0] 6305 if v_0.Op != OpARM64MOVDconst { 6306 break 6307 } 6308 
c := v_0.AuxInt 6309 x := v.Args[1] 6310 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 6311 break 6312 } 6313 v.reset(OpARM64ADDshiftLL) 6314 v.AuxInt = log2(c + 1) 6315 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6316 v0.AddArg(x) 6317 v.AddArg(v0) 6318 v.AddArg(x) 6319 return true 6320 } 6321 // match: (MULW x (MOVDconst [c])) 6322 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 6323 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 6324 for { 6325 _ = v.Args[1] 6326 x := v.Args[0] 6327 v_1 := v.Args[1] 6328 if v_1.Op != OpARM64MOVDconst { 6329 break 6330 } 6331 c := v_1.AuxInt 6332 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 6333 break 6334 } 6335 v.reset(OpARM64SLLconst) 6336 v.AuxInt = log2(c / 3) 6337 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6338 v0.AuxInt = 1 6339 v0.AddArg(x) 6340 v0.AddArg(x) 6341 v.AddArg(v0) 6342 return true 6343 } 6344 // match: (MULW (MOVDconst [c]) x) 6345 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 6346 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 6347 for { 6348 _ = v.Args[1] 6349 v_0 := v.Args[0] 6350 if v_0.Op != OpARM64MOVDconst { 6351 break 6352 } 6353 c := v_0.AuxInt 6354 x := v.Args[1] 6355 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 6356 break 6357 } 6358 v.reset(OpARM64SLLconst) 6359 v.AuxInt = log2(c / 3) 6360 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6361 v0.AuxInt = 1 6362 v0.AddArg(x) 6363 v0.AddArg(x) 6364 v.AddArg(v0) 6365 return true 6366 } 6367 // match: (MULW x (MOVDconst [c])) 6368 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 6369 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 6370 for { 6371 _ = v.Args[1] 6372 x := v.Args[0] 6373 v_1 := v.Args[1] 6374 if v_1.Op != OpARM64MOVDconst { 6375 break 6376 } 6377 c := v_1.AuxInt 6378 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 6379 break 6380 } 6381 v.reset(OpARM64SLLconst) 6382 v.AuxInt = log2(c / 5) 6383 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 
6384 v0.AuxInt = 2 6385 v0.AddArg(x) 6386 v0.AddArg(x) 6387 v.AddArg(v0) 6388 return true 6389 } 6390 // match: (MULW (MOVDconst [c]) x) 6391 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 6392 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 6393 for { 6394 _ = v.Args[1] 6395 v_0 := v.Args[0] 6396 if v_0.Op != OpARM64MOVDconst { 6397 break 6398 } 6399 c := v_0.AuxInt 6400 x := v.Args[1] 6401 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 6402 break 6403 } 6404 v.reset(OpARM64SLLconst) 6405 v.AuxInt = log2(c / 5) 6406 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6407 v0.AuxInt = 2 6408 v0.AddArg(x) 6409 v0.AddArg(x) 6410 v.AddArg(v0) 6411 return true 6412 } 6413 // match: (MULW x (MOVDconst [c])) 6414 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 6415 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 6416 for { 6417 _ = v.Args[1] 6418 x := v.Args[0] 6419 v_1 := v.Args[1] 6420 if v_1.Op != OpARM64MOVDconst { 6421 break 6422 } 6423 c := v_1.AuxInt 6424 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 6425 break 6426 } 6427 v.reset(OpARM64SLLconst) 6428 v.AuxInt = log2(c / 7) 6429 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6430 v0.AuxInt = 3 6431 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6432 v1.AddArg(x) 6433 v0.AddArg(v1) 6434 v0.AddArg(x) 6435 v.AddArg(v0) 6436 return true 6437 } 6438 // match: (MULW (MOVDconst [c]) x) 6439 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 6440 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 6441 for { 6442 _ = v.Args[1] 6443 v_0 := v.Args[0] 6444 if v_0.Op != OpARM64MOVDconst { 6445 break 6446 } 6447 c := v_0.AuxInt 6448 x := v.Args[1] 6449 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 6450 break 6451 } 6452 v.reset(OpARM64SLLconst) 6453 v.AuxInt = log2(c / 7) 6454 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6455 v0.AuxInt = 3 6456 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6457 v1.AddArg(x) 6458 
v0.AddArg(v1) 6459 v0.AddArg(x) 6460 v.AddArg(v0) 6461 return true 6462 } 6463 // match: (MULW x (MOVDconst [c])) 6464 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 6465 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6466 for { 6467 _ = v.Args[1] 6468 x := v.Args[0] 6469 v_1 := v.Args[1] 6470 if v_1.Op != OpARM64MOVDconst { 6471 break 6472 } 6473 c := v_1.AuxInt 6474 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6475 break 6476 } 6477 v.reset(OpARM64SLLconst) 6478 v.AuxInt = log2(c / 9) 6479 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6480 v0.AuxInt = 3 6481 v0.AddArg(x) 6482 v0.AddArg(x) 6483 v.AddArg(v0) 6484 return true 6485 } 6486 // match: (MULW (MOVDconst [c]) x) 6487 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 6488 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6489 for { 6490 _ = v.Args[1] 6491 v_0 := v.Args[0] 6492 if v_0.Op != OpARM64MOVDconst { 6493 break 6494 } 6495 c := v_0.AuxInt 6496 x := v.Args[1] 6497 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6498 break 6499 } 6500 v.reset(OpARM64SLLconst) 6501 v.AuxInt = log2(c / 9) 6502 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6503 v0.AuxInt = 3 6504 v0.AddArg(x) 6505 v0.AddArg(x) 6506 v.AddArg(v0) 6507 return true 6508 } 6509 return false 6510 } 6511 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool { 6512 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 6513 // cond: 6514 // result: (MOVDconst [int64(int32(c)*int32(d))]) 6515 for { 6516 _ = v.Args[1] 6517 v_0 := v.Args[0] 6518 if v_0.Op != OpARM64MOVDconst { 6519 break 6520 } 6521 c := v_0.AuxInt 6522 v_1 := v.Args[1] 6523 if v_1.Op != OpARM64MOVDconst { 6524 break 6525 } 6526 d := v_1.AuxInt 6527 v.reset(OpARM64MOVDconst) 6528 v.AuxInt = int64(int32(c) * int32(d)) 6529 return true 6530 } 6531 // match: (MULW (MOVDconst [d]) (MOVDconst [c])) 6532 // cond: 6533 // result: (MOVDconst [int64(int32(c)*int32(d))]) 6534 for { 6535 _ = v.Args[1] 6536 v_0 := v.Args[0] 6537 if v_0.Op 
!= OpARM64MOVDconst { 6538 break 6539 } 6540 d := v_0.AuxInt 6541 v_1 := v.Args[1] 6542 if v_1.Op != OpARM64MOVDconst { 6543 break 6544 } 6545 c := v_1.AuxInt 6546 v.reset(OpARM64MOVDconst) 6547 v.AuxInt = int64(int32(c) * int32(d)) 6548 return true 6549 } 6550 return false 6551 } 6552 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool { 6553 // match: (MVN (MOVDconst [c])) 6554 // cond: 6555 // result: (MOVDconst [^c]) 6556 for { 6557 v_0 := v.Args[0] 6558 if v_0.Op != OpARM64MOVDconst { 6559 break 6560 } 6561 c := v_0.AuxInt 6562 v.reset(OpARM64MOVDconst) 6563 v.AuxInt = ^c 6564 return true 6565 } 6566 return false 6567 } 6568 func rewriteValueARM64_OpARM64NEG_0(v *Value) bool { 6569 // match: (NEG (MOVDconst [c])) 6570 // cond: 6571 // result: (MOVDconst [-c]) 6572 for { 6573 v_0 := v.Args[0] 6574 if v_0.Op != OpARM64MOVDconst { 6575 break 6576 } 6577 c := v_0.AuxInt 6578 v.reset(OpARM64MOVDconst) 6579 v.AuxInt = -c 6580 return true 6581 } 6582 return false 6583 } 6584 func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool { 6585 // match: (NotEqual (FlagEQ)) 6586 // cond: 6587 // result: (MOVDconst [0]) 6588 for { 6589 v_0 := v.Args[0] 6590 if v_0.Op != OpARM64FlagEQ { 6591 break 6592 } 6593 v.reset(OpARM64MOVDconst) 6594 v.AuxInt = 0 6595 return true 6596 } 6597 // match: (NotEqual (FlagLT_ULT)) 6598 // cond: 6599 // result: (MOVDconst [1]) 6600 for { 6601 v_0 := v.Args[0] 6602 if v_0.Op != OpARM64FlagLT_ULT { 6603 break 6604 } 6605 v.reset(OpARM64MOVDconst) 6606 v.AuxInt = 1 6607 return true 6608 } 6609 // match: (NotEqual (FlagLT_UGT)) 6610 // cond: 6611 // result: (MOVDconst [1]) 6612 for { 6613 v_0 := v.Args[0] 6614 if v_0.Op != OpARM64FlagLT_UGT { 6615 break 6616 } 6617 v.reset(OpARM64MOVDconst) 6618 v.AuxInt = 1 6619 return true 6620 } 6621 // match: (NotEqual (FlagGT_ULT)) 6622 // cond: 6623 // result: (MOVDconst [1]) 6624 for { 6625 v_0 := v.Args[0] 6626 if v_0.Op != OpARM64FlagGT_ULT { 6627 break 6628 } 6629 v.reset(OpARM64MOVDconst) 6630 v.AuxInt = 
1 6631 return true 6632 } 6633 // match: (NotEqual (FlagGT_UGT)) 6634 // cond: 6635 // result: (MOVDconst [1]) 6636 for { 6637 v_0 := v.Args[0] 6638 if v_0.Op != OpARM64FlagGT_UGT { 6639 break 6640 } 6641 v.reset(OpARM64MOVDconst) 6642 v.AuxInt = 1 6643 return true 6644 } 6645 // match: (NotEqual (InvertFlags x)) 6646 // cond: 6647 // result: (NotEqual x) 6648 for { 6649 v_0 := v.Args[0] 6650 if v_0.Op != OpARM64InvertFlags { 6651 break 6652 } 6653 x := v_0.Args[0] 6654 v.reset(OpARM64NotEqual) 6655 v.AddArg(x) 6656 return true 6657 } 6658 return false 6659 } 6660 func rewriteValueARM64_OpARM64OR_0(v *Value) bool { 6661 b := v.Block 6662 _ = b 6663 // match: (OR x (MOVDconst [c])) 6664 // cond: 6665 // result: (ORconst [c] x) 6666 for { 6667 _ = v.Args[1] 6668 x := v.Args[0] 6669 v_1 := v.Args[1] 6670 if v_1.Op != OpARM64MOVDconst { 6671 break 6672 } 6673 c := v_1.AuxInt 6674 v.reset(OpARM64ORconst) 6675 v.AuxInt = c 6676 v.AddArg(x) 6677 return true 6678 } 6679 // match: (OR (MOVDconst [c]) x) 6680 // cond: 6681 // result: (ORconst [c] x) 6682 for { 6683 _ = v.Args[1] 6684 v_0 := v.Args[0] 6685 if v_0.Op != OpARM64MOVDconst { 6686 break 6687 } 6688 c := v_0.AuxInt 6689 x := v.Args[1] 6690 v.reset(OpARM64ORconst) 6691 v.AuxInt = c 6692 v.AddArg(x) 6693 return true 6694 } 6695 // match: (OR x x) 6696 // cond: 6697 // result: x 6698 for { 6699 _ = v.Args[1] 6700 x := v.Args[0] 6701 if x != v.Args[1] { 6702 break 6703 } 6704 v.reset(OpCopy) 6705 v.Type = x.Type 6706 v.AddArg(x) 6707 return true 6708 } 6709 // match: (OR x (SLLconst [c] y)) 6710 // cond: 6711 // result: (ORshiftLL x y [c]) 6712 for { 6713 _ = v.Args[1] 6714 x := v.Args[0] 6715 v_1 := v.Args[1] 6716 if v_1.Op != OpARM64SLLconst { 6717 break 6718 } 6719 c := v_1.AuxInt 6720 y := v_1.Args[0] 6721 v.reset(OpARM64ORshiftLL) 6722 v.AuxInt = c 6723 v.AddArg(x) 6724 v.AddArg(y) 6725 return true 6726 } 6727 // match: (OR (SLLconst [c] y) x) 6728 // cond: 6729 // result: (ORshiftLL x y [c]) 6730 for { 6731 _ = 
v.Args[1] 6732 v_0 := v.Args[0] 6733 if v_0.Op != OpARM64SLLconst { 6734 break 6735 } 6736 c := v_0.AuxInt 6737 y := v_0.Args[0] 6738 x := v.Args[1] 6739 v.reset(OpARM64ORshiftLL) 6740 v.AuxInt = c 6741 v.AddArg(x) 6742 v.AddArg(y) 6743 return true 6744 } 6745 // match: (OR x (SRLconst [c] y)) 6746 // cond: 6747 // result: (ORshiftRL x y [c]) 6748 for { 6749 _ = v.Args[1] 6750 x := v.Args[0] 6751 v_1 := v.Args[1] 6752 if v_1.Op != OpARM64SRLconst { 6753 break 6754 } 6755 c := v_1.AuxInt 6756 y := v_1.Args[0] 6757 v.reset(OpARM64ORshiftRL) 6758 v.AuxInt = c 6759 v.AddArg(x) 6760 v.AddArg(y) 6761 return true 6762 } 6763 // match: (OR (SRLconst [c] y) x) 6764 // cond: 6765 // result: (ORshiftRL x y [c]) 6766 for { 6767 _ = v.Args[1] 6768 v_0 := v.Args[0] 6769 if v_0.Op != OpARM64SRLconst { 6770 break 6771 } 6772 c := v_0.AuxInt 6773 y := v_0.Args[0] 6774 x := v.Args[1] 6775 v.reset(OpARM64ORshiftRL) 6776 v.AuxInt = c 6777 v.AddArg(x) 6778 v.AddArg(y) 6779 return true 6780 } 6781 // match: (OR x (SRAconst [c] y)) 6782 // cond: 6783 // result: (ORshiftRA x y [c]) 6784 for { 6785 _ = v.Args[1] 6786 x := v.Args[0] 6787 v_1 := v.Args[1] 6788 if v_1.Op != OpARM64SRAconst { 6789 break 6790 } 6791 c := v_1.AuxInt 6792 y := v_1.Args[0] 6793 v.reset(OpARM64ORshiftRA) 6794 v.AuxInt = c 6795 v.AddArg(x) 6796 v.AddArg(y) 6797 return true 6798 } 6799 // match: (OR (SRAconst [c] y) x) 6800 // cond: 6801 // result: (ORshiftRA x y [c]) 6802 for { 6803 _ = v.Args[1] 6804 v_0 := v.Args[0] 6805 if v_0.Op != OpARM64SRAconst { 6806 break 6807 } 6808 c := v_0.AuxInt 6809 y := v_0.Args[0] 6810 x := v.Args[1] 6811 v.reset(OpARM64ORshiftRA) 6812 v.AuxInt = c 6813 v.AddArg(x) 6814 v.AddArg(y) 6815 return true 6816 } 6817 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 6818 // cond: i1 == i0+1 && 
i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6819 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 6820 for { 6821 t := v.Type 6822 _ = v.Args[1] 6823 o0 := v.Args[0] 6824 if o0.Op != OpARM64ORshiftLL { 6825 break 6826 } 6827 if o0.AuxInt != 8 { 6828 break 6829 } 6830 _ = o0.Args[1] 6831 o1 := o0.Args[0] 6832 if o1.Op != OpARM64ORshiftLL { 6833 break 6834 } 6835 if o1.AuxInt != 16 { 6836 break 6837 } 6838 _ = o1.Args[1] 6839 s0 := o1.Args[0] 6840 if s0.Op != OpARM64SLLconst { 6841 break 6842 } 6843 if s0.AuxInt != 24 { 6844 break 6845 } 6846 y0 := s0.Args[0] 6847 if y0.Op != OpARM64MOVDnop { 6848 break 6849 } 6850 x0 := y0.Args[0] 6851 if x0.Op != OpARM64MOVBUload { 6852 break 6853 } 6854 i3 := x0.AuxInt 6855 s := x0.Aux 6856 _ = x0.Args[1] 6857 p := x0.Args[0] 6858 mem := x0.Args[1] 6859 y1 := o1.Args[1] 6860 if y1.Op != OpARM64MOVDnop { 6861 break 6862 } 6863 x1 := y1.Args[0] 6864 if x1.Op != OpARM64MOVBUload { 6865 break 6866 } 6867 i2 := x1.AuxInt 6868 if x1.Aux != s { 6869 break 6870 } 6871 _ = x1.Args[1] 6872 if p != x1.Args[0] { 6873 break 6874 } 6875 if mem != x1.Args[1] { 6876 break 6877 } 6878 y2 := o0.Args[1] 6879 if y2.Op != OpARM64MOVDnop { 6880 break 6881 } 6882 x2 := y2.Args[0] 6883 if x2.Op != OpARM64MOVBUload { 6884 break 6885 } 6886 i1 := x2.AuxInt 6887 if x2.Aux != s { 6888 break 6889 } 6890 _ = x2.Args[1] 6891 if p != x2.Args[0] { 6892 break 6893 } 6894 if mem != x2.Args[1] { 6895 break 6896 } 6897 y3 := v.Args[1] 6898 if y3.Op != OpARM64MOVDnop { 6899 break 6900 } 6901 x3 := y3.Args[0] 6902 if x3.Op != OpARM64MOVBUload { 6903 break 6904 } 6905 i0 := 
x3.AuxInt 6906 if x3.Aux != s { 6907 break 6908 } 6909 _ = x3.Args[1] 6910 if p != x3.Args[0] { 6911 break 6912 } 6913 if mem != x3.Args[1] { 6914 break 6915 } 6916 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 6917 break 6918 } 6919 b = mergePoint(b, x0, x1, x2, x3) 6920 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 6921 v.reset(OpCopy) 6922 v.AddArg(v0) 6923 v0.Aux = s 6924 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6925 v1.AuxInt = i0 6926 v1.AddArg(p) 6927 v0.AddArg(v1) 6928 v0.AddArg(mem) 6929 return true 6930 } 6931 return false 6932 } 6933 func rewriteValueARM64_OpARM64OR_10(v *Value) bool { 6934 b := v.Block 6935 _ = b 6936 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 6937 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6938 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 6939 for { 6940 t := v.Type 6941 _ = v.Args[1] 6942 y3 := v.Args[0] 6943 if y3.Op != OpARM64MOVDnop { 6944 break 6945 } 6946 x3 := y3.Args[0] 6947 if x3.Op != OpARM64MOVBUload { 6948 break 6949 } 6950 i0 := 
x3.AuxInt 6951 s := x3.Aux 6952 _ = x3.Args[1] 6953 p := x3.Args[0] 6954 mem := x3.Args[1] 6955 o0 := v.Args[1] 6956 if o0.Op != OpARM64ORshiftLL { 6957 break 6958 } 6959 if o0.AuxInt != 8 { 6960 break 6961 } 6962 _ = o0.Args[1] 6963 o1 := o0.Args[0] 6964 if o1.Op != OpARM64ORshiftLL { 6965 break 6966 } 6967 if o1.AuxInt != 16 { 6968 break 6969 } 6970 _ = o1.Args[1] 6971 s0 := o1.Args[0] 6972 if s0.Op != OpARM64SLLconst { 6973 break 6974 } 6975 if s0.AuxInt != 24 { 6976 break 6977 } 6978 y0 := s0.Args[0] 6979 if y0.Op != OpARM64MOVDnop { 6980 break 6981 } 6982 x0 := y0.Args[0] 6983 if x0.Op != OpARM64MOVBUload { 6984 break 6985 } 6986 i3 := x0.AuxInt 6987 if x0.Aux != s { 6988 break 6989 } 6990 _ = x0.Args[1] 6991 if p != x0.Args[0] { 6992 break 6993 } 6994 if mem != x0.Args[1] { 6995 break 6996 } 6997 y1 := o1.Args[1] 6998 if y1.Op != OpARM64MOVDnop { 6999 break 7000 } 7001 x1 := y1.Args[0] 7002 if x1.Op != OpARM64MOVBUload { 7003 break 7004 } 7005 i2 := x1.AuxInt 7006 if x1.Aux != s { 7007 break 7008 } 7009 _ = x1.Args[1] 7010 if p != x1.Args[0] { 7011 break 7012 } 7013 if mem != x1.Args[1] { 7014 break 7015 } 7016 y2 := o0.Args[1] 7017 if y2.Op != OpARM64MOVDnop { 7018 break 7019 } 7020 x2 := y2.Args[0] 7021 if x2.Op != OpARM64MOVBUload { 7022 break 7023 } 7024 i1 := x2.AuxInt 7025 if x2.Aux != s { 7026 break 7027 } 7028 _ = x2.Args[1] 7029 if p != x2.Args[0] { 7030 break 7031 } 7032 if mem != x2.Args[1] { 7033 break 7034 } 7035 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7036 break 7037 } 7038 b = mergePoint(b, x0, x1, x2, x3) 7039 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7040 
v.reset(OpCopy) 7041 v.AddArg(v0) 7042 v0.Aux = s 7043 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7044 v1.AuxInt = i0 7045 v1.AddArg(p) 7046 v0.AddArg(v1) 7047 v0.AddArg(mem) 7048 return true 7049 } 7050 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 7051 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7052 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7053 for { 7054 t := v.Type 7055 _ = v.Args[1] 7056 o0 := v.Args[0] 7057 if o0.Op != OpARM64ORshiftLL { 7058 break 7059 } 7060 if o0.AuxInt != 8 { 7061 break 7062 } 7063 _ = o0.Args[1] 7064 o1 := o0.Args[0] 7065 if o1.Op != OpARM64ORshiftLL { 7066 break 7067 } 7068 if o1.AuxInt != 16 { 7069 break 7070 } 7071 _ 
= o1.Args[1] 7072 o2 := o1.Args[0] 7073 if o2.Op != OpARM64ORshiftLL { 7074 break 7075 } 7076 if o2.AuxInt != 24 { 7077 break 7078 } 7079 _ = o2.Args[1] 7080 o3 := o2.Args[0] 7081 if o3.Op != OpARM64ORshiftLL { 7082 break 7083 } 7084 if o3.AuxInt != 32 { 7085 break 7086 } 7087 _ = o3.Args[1] 7088 o4 := o3.Args[0] 7089 if o4.Op != OpARM64ORshiftLL { 7090 break 7091 } 7092 if o4.AuxInt != 40 { 7093 break 7094 } 7095 _ = o4.Args[1] 7096 o5 := o4.Args[0] 7097 if o5.Op != OpARM64ORshiftLL { 7098 break 7099 } 7100 if o5.AuxInt != 48 { 7101 break 7102 } 7103 _ = o5.Args[1] 7104 s0 := o5.Args[0] 7105 if s0.Op != OpARM64SLLconst { 7106 break 7107 } 7108 if s0.AuxInt != 56 { 7109 break 7110 } 7111 y0 := s0.Args[0] 7112 if y0.Op != OpARM64MOVDnop { 7113 break 7114 } 7115 x0 := y0.Args[0] 7116 if x0.Op != OpARM64MOVBUload { 7117 break 7118 } 7119 i7 := x0.AuxInt 7120 s := x0.Aux 7121 _ = x0.Args[1] 7122 p := x0.Args[0] 7123 mem := x0.Args[1] 7124 y1 := o5.Args[1] 7125 if y1.Op != OpARM64MOVDnop { 7126 break 7127 } 7128 x1 := y1.Args[0] 7129 if x1.Op != OpARM64MOVBUload { 7130 break 7131 } 7132 i6 := x1.AuxInt 7133 if x1.Aux != s { 7134 break 7135 } 7136 _ = x1.Args[1] 7137 if p != x1.Args[0] { 7138 break 7139 } 7140 if mem != x1.Args[1] { 7141 break 7142 } 7143 y2 := o4.Args[1] 7144 if y2.Op != OpARM64MOVDnop { 7145 break 7146 } 7147 x2 := y2.Args[0] 7148 if x2.Op != OpARM64MOVBUload { 7149 break 7150 } 7151 i5 := x2.AuxInt 7152 if x2.Aux != s { 7153 break 7154 } 7155 _ = x2.Args[1] 7156 if p != x2.Args[0] { 7157 break 7158 } 7159 if mem != x2.Args[1] { 7160 break 7161 } 7162 y3 := o3.Args[1] 7163 if y3.Op != OpARM64MOVDnop { 7164 break 7165 } 7166 x3 := y3.Args[0] 7167 if x3.Op != OpARM64MOVBUload { 7168 break 7169 } 7170 i4 := x3.AuxInt 7171 if x3.Aux != s { 7172 break 7173 } 7174 _ = x3.Args[1] 7175 if p != x3.Args[0] { 7176 break 7177 } 7178 if mem != x3.Args[1] { 7179 break 7180 } 7181 y4 := o2.Args[1] 7182 if y4.Op != OpARM64MOVDnop { 7183 break 7184 } 7185 x4 := 
y4.Args[0] 7186 if x4.Op != OpARM64MOVBUload { 7187 break 7188 } 7189 i3 := x4.AuxInt 7190 if x4.Aux != s { 7191 break 7192 } 7193 _ = x4.Args[1] 7194 if p != x4.Args[0] { 7195 break 7196 } 7197 if mem != x4.Args[1] { 7198 break 7199 } 7200 y5 := o1.Args[1] 7201 if y5.Op != OpARM64MOVDnop { 7202 break 7203 } 7204 x5 := y5.Args[0] 7205 if x5.Op != OpARM64MOVBUload { 7206 break 7207 } 7208 i2 := x5.AuxInt 7209 if x5.Aux != s { 7210 break 7211 } 7212 _ = x5.Args[1] 7213 if p != x5.Args[0] { 7214 break 7215 } 7216 if mem != x5.Args[1] { 7217 break 7218 } 7219 y6 := o0.Args[1] 7220 if y6.Op != OpARM64MOVDnop { 7221 break 7222 } 7223 x6 := y6.Args[0] 7224 if x6.Op != OpARM64MOVBUload { 7225 break 7226 } 7227 i1 := x6.AuxInt 7228 if x6.Aux != s { 7229 break 7230 } 7231 _ = x6.Args[1] 7232 if p != x6.Args[0] { 7233 break 7234 } 7235 if mem != x6.Args[1] { 7236 break 7237 } 7238 y7 := v.Args[1] 7239 if y7.Op != OpARM64MOVDnop { 7240 break 7241 } 7242 x7 := y7.Args[0] 7243 if x7.Op != OpARM64MOVBUload { 7244 break 7245 } 7246 i0 := x7.AuxInt 7247 if x7.Aux != s { 7248 break 7249 } 7250 _ = x7.Args[1] 7251 if p != x7.Args[0] { 7252 break 7253 } 7254 if mem != x7.Args[1] { 7255 break 7256 } 7257 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && 
clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7258 break 7259 } 7260 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7261 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7262 v.reset(OpCopy) 7263 v.AddArg(v0) 7264 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7265 v1.Aux = s 7266 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7267 v2.AuxInt = i0 7268 v2.AddArg(p) 7269 v1.AddArg(v2) 7270 v1.AddArg(mem) 7271 v0.AddArg(v1) 7272 return true 7273 } 7274 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 7275 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7276 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7277 for { 7278 t := v.Type 7279 _ = 
v.Args[1] 7280 y7 := v.Args[0] 7281 if y7.Op != OpARM64MOVDnop { 7282 break 7283 } 7284 x7 := y7.Args[0] 7285 if x7.Op != OpARM64MOVBUload { 7286 break 7287 } 7288 i0 := x7.AuxInt 7289 s := x7.Aux 7290 _ = x7.Args[1] 7291 p := x7.Args[0] 7292 mem := x7.Args[1] 7293 o0 := v.Args[1] 7294 if o0.Op != OpARM64ORshiftLL { 7295 break 7296 } 7297 if o0.AuxInt != 8 { 7298 break 7299 } 7300 _ = o0.Args[1] 7301 o1 := o0.Args[0] 7302 if o1.Op != OpARM64ORshiftLL { 7303 break 7304 } 7305 if o1.AuxInt != 16 { 7306 break 7307 } 7308 _ = o1.Args[1] 7309 o2 := o1.Args[0] 7310 if o2.Op != OpARM64ORshiftLL { 7311 break 7312 } 7313 if o2.AuxInt != 24 { 7314 break 7315 } 7316 _ = o2.Args[1] 7317 o3 := o2.Args[0] 7318 if o3.Op != OpARM64ORshiftLL { 7319 break 7320 } 7321 if o3.AuxInt != 32 { 7322 break 7323 } 7324 _ = o3.Args[1] 7325 o4 := o3.Args[0] 7326 if o4.Op != OpARM64ORshiftLL { 7327 break 7328 } 7329 if o4.AuxInt != 40 { 7330 break 7331 } 7332 _ = o4.Args[1] 7333 o5 := o4.Args[0] 7334 if o5.Op != OpARM64ORshiftLL { 7335 break 7336 } 7337 if o5.AuxInt != 48 { 7338 break 7339 } 7340 _ = o5.Args[1] 7341 s0 := o5.Args[0] 7342 if s0.Op != OpARM64SLLconst { 7343 break 7344 } 7345 if s0.AuxInt != 56 { 7346 break 7347 } 7348 y0 := s0.Args[0] 7349 if y0.Op != OpARM64MOVDnop { 7350 break 7351 } 7352 x0 := y0.Args[0] 7353 if x0.Op != OpARM64MOVBUload { 7354 break 7355 } 7356 i7 := x0.AuxInt 7357 if x0.Aux != s { 7358 break 7359 } 7360 _ = x0.Args[1] 7361 if p != x0.Args[0] { 7362 break 7363 } 7364 if mem != x0.Args[1] { 7365 break 7366 } 7367 y1 := o5.Args[1] 7368 if y1.Op != OpARM64MOVDnop { 7369 break 7370 } 7371 x1 := y1.Args[0] 7372 if x1.Op != OpARM64MOVBUload { 7373 break 7374 } 7375 i6 := x1.AuxInt 7376 if x1.Aux != s { 7377 break 7378 } 7379 _ = x1.Args[1] 7380 if p != x1.Args[0] { 7381 break 7382 } 7383 if mem != x1.Args[1] { 7384 break 7385 } 7386 y2 := o4.Args[1] 7387 if y2.Op != OpARM64MOVDnop { 7388 break 7389 } 7390 x2 := y2.Args[0] 7391 if x2.Op != OpARM64MOVBUload { 7392 
break 7393 } 7394 i5 := x2.AuxInt 7395 if x2.Aux != s { 7396 break 7397 } 7398 _ = x2.Args[1] 7399 if p != x2.Args[0] { 7400 break 7401 } 7402 if mem != x2.Args[1] { 7403 break 7404 } 7405 y3 := o3.Args[1] 7406 if y3.Op != OpARM64MOVDnop { 7407 break 7408 } 7409 x3 := y3.Args[0] 7410 if x3.Op != OpARM64MOVBUload { 7411 break 7412 } 7413 i4 := x3.AuxInt 7414 if x3.Aux != s { 7415 break 7416 } 7417 _ = x3.Args[1] 7418 if p != x3.Args[0] { 7419 break 7420 } 7421 if mem != x3.Args[1] { 7422 break 7423 } 7424 y4 := o2.Args[1] 7425 if y4.Op != OpARM64MOVDnop { 7426 break 7427 } 7428 x4 := y4.Args[0] 7429 if x4.Op != OpARM64MOVBUload { 7430 break 7431 } 7432 i3 := x4.AuxInt 7433 if x4.Aux != s { 7434 break 7435 } 7436 _ = x4.Args[1] 7437 if p != x4.Args[0] { 7438 break 7439 } 7440 if mem != x4.Args[1] { 7441 break 7442 } 7443 y5 := o1.Args[1] 7444 if y5.Op != OpARM64MOVDnop { 7445 break 7446 } 7447 x5 := y5.Args[0] 7448 if x5.Op != OpARM64MOVBUload { 7449 break 7450 } 7451 i2 := x5.AuxInt 7452 if x5.Aux != s { 7453 break 7454 } 7455 _ = x5.Args[1] 7456 if p != x5.Args[0] { 7457 break 7458 } 7459 if mem != x5.Args[1] { 7460 break 7461 } 7462 y6 := o0.Args[1] 7463 if y6.Op != OpARM64MOVDnop { 7464 break 7465 } 7466 x6 := y6.Args[0] 7467 if x6.Op != OpARM64MOVBUload { 7468 break 7469 } 7470 i1 := x6.AuxInt 7471 if x6.Aux != s { 7472 break 7473 } 7474 _ = x6.Args[1] 7475 if p != x6.Args[0] { 7476 break 7477 } 7478 if mem != x6.Args[1] { 7479 break 7480 } 7481 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && 
clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7482 break 7483 } 7484 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7485 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7486 v.reset(OpCopy) 7487 v.AddArg(v0) 7488 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7489 v1.Aux = s 7490 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7491 v2.AuxInt = i0 7492 v2.AddArg(p) 7493 v1.AddArg(v2) 7494 v1.AddArg(mem) 7495 v0.AddArg(v1) 7496 return true 7497 } 7498 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 7499 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7500 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7501 for { 7502 t := v.Type 7503 _ = v.Args[1] 7504 o0 := v.Args[0] 7505 if o0.Op != OpARM64ORshiftLL { 7506 break 7507 } 7508 if o0.AuxInt != 8 { 7509 break 7510 } 7511 _ = o0.Args[1] 7512 o1 := o0.Args[0] 7513 if o1.Op != OpARM64ORshiftLL { 7514 break 7515 } 7516 if o1.AuxInt != 16 { 7517 break 7518 } 7519 _ = o1.Args[1] 7520 s0 := o1.Args[0] 7521 if s0.Op != OpARM64SLLconst { 7522 break 7523 } 7524 if s0.AuxInt != 24 { 7525 break 7526 } 7527 y0 := s0.Args[0] 7528 if 
y0.Op != OpARM64MOVDnop { 7529 break 7530 } 7531 x0 := y0.Args[0] 7532 if x0.Op != OpARM64MOVBUload { 7533 break 7534 } 7535 i0 := x0.AuxInt 7536 s := x0.Aux 7537 _ = x0.Args[1] 7538 p := x0.Args[0] 7539 mem := x0.Args[1] 7540 y1 := o1.Args[1] 7541 if y1.Op != OpARM64MOVDnop { 7542 break 7543 } 7544 x1 := y1.Args[0] 7545 if x1.Op != OpARM64MOVBUload { 7546 break 7547 } 7548 i1 := x1.AuxInt 7549 if x1.Aux != s { 7550 break 7551 } 7552 _ = x1.Args[1] 7553 if p != x1.Args[0] { 7554 break 7555 } 7556 if mem != x1.Args[1] { 7557 break 7558 } 7559 y2 := o0.Args[1] 7560 if y2.Op != OpARM64MOVDnop { 7561 break 7562 } 7563 x2 := y2.Args[0] 7564 if x2.Op != OpARM64MOVBUload { 7565 break 7566 } 7567 i2 := x2.AuxInt 7568 if x2.Aux != s { 7569 break 7570 } 7571 _ = x2.Args[1] 7572 if p != x2.Args[0] { 7573 break 7574 } 7575 if mem != x2.Args[1] { 7576 break 7577 } 7578 y3 := v.Args[1] 7579 if y3.Op != OpARM64MOVDnop { 7580 break 7581 } 7582 x3 := y3.Args[0] 7583 if x3.Op != OpARM64MOVBUload { 7584 break 7585 } 7586 i3 := x3.AuxInt 7587 if x3.Aux != s { 7588 break 7589 } 7590 _ = x3.Args[1] 7591 if p != x3.Args[0] { 7592 break 7593 } 7594 if mem != x3.Args[1] { 7595 break 7596 } 7597 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7598 break 7599 } 7600 b = mergePoint(b, x0, x1, x2, x3) 7601 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7602 v.reset(OpCopy) 7603 v.AddArg(v0) 7604 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7605 v1.Aux = s 7606 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7607 v2.AuxInt = i0 7608 v2.AddArg(p) 7609 v1.AddArg(v2) 7610 v1.AddArg(mem) 7611 v0.AddArg(v1) 7612 return true 7613 } 7614 
// match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 7615 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7616 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7617 for { 7618 t := v.Type 7619 _ = v.Args[1] 7620 y3 := v.Args[0] 7621 if y3.Op != OpARM64MOVDnop { 7622 break 7623 } 7624 x3 := y3.Args[0] 7625 if x3.Op != OpARM64MOVBUload { 7626 break 7627 } 7628 i3 := x3.AuxInt 7629 s := x3.Aux 7630 _ = x3.Args[1] 7631 p := x3.Args[0] 7632 mem := x3.Args[1] 7633 o0 := v.Args[1] 7634 if o0.Op != OpARM64ORshiftLL { 7635 break 7636 } 7637 if o0.AuxInt != 8 { 7638 break 7639 } 7640 _ = o0.Args[1] 7641 o1 := o0.Args[0] 7642 if o1.Op != OpARM64ORshiftLL { 7643 break 7644 } 7645 if o1.AuxInt != 16 { 7646 break 7647 } 7648 _ = o1.Args[1] 7649 s0 := o1.Args[0] 7650 if s0.Op != OpARM64SLLconst { 7651 break 7652 } 7653 if s0.AuxInt != 24 { 7654 break 7655 } 7656 y0 := s0.Args[0] 7657 if y0.Op != OpARM64MOVDnop { 7658 break 7659 } 7660 x0 := y0.Args[0] 7661 if x0.Op != OpARM64MOVBUload { 7662 break 7663 } 7664 i0 := x0.AuxInt 7665 if x0.Aux != s { 7666 break 7667 } 7668 _ = x0.Args[1] 7669 if p != x0.Args[0] { 7670 break 7671 } 7672 if mem != x0.Args[1] { 7673 break 7674 } 7675 y1 := o1.Args[1] 7676 if y1.Op != OpARM64MOVDnop { 7677 break 7678 } 7679 x1 := y1.Args[0] 7680 if x1.Op != OpARM64MOVBUload { 7681 break 7682 } 7683 i1 := x1.AuxInt 7684 if x1.Aux != s { 7685 
break 7686 } 7687 _ = x1.Args[1] 7688 if p != x1.Args[0] { 7689 break 7690 } 7691 if mem != x1.Args[1] { 7692 break 7693 } 7694 y2 := o0.Args[1] 7695 if y2.Op != OpARM64MOVDnop { 7696 break 7697 } 7698 x2 := y2.Args[0] 7699 if x2.Op != OpARM64MOVBUload { 7700 break 7701 } 7702 i2 := x2.AuxInt 7703 if x2.Aux != s { 7704 break 7705 } 7706 _ = x2.Args[1] 7707 if p != x2.Args[0] { 7708 break 7709 } 7710 if mem != x2.Args[1] { 7711 break 7712 } 7713 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7714 break 7715 } 7716 b = mergePoint(b, x0, x1, x2, x3) 7717 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7718 v.reset(OpCopy) 7719 v.AddArg(v0) 7720 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7721 v1.Aux = s 7722 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7723 v2.AuxInt = i0 7724 v2.AddArg(p) 7725 v1.AddArg(v2) 7726 v1.AddArg(mem) 7727 v0.AddArg(v1) 7728 return true 7729 } 7730 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 7731 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && 
y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7732 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7733 for { 7734 t := v.Type 7735 _ = v.Args[1] 7736 o0 := v.Args[0] 7737 if o0.Op != OpARM64ORshiftLL { 7738 break 7739 } 7740 if o0.AuxInt != 8 { 7741 break 7742 } 7743 _ = o0.Args[1] 7744 o1 := o0.Args[0] 7745 if o1.Op != OpARM64ORshiftLL { 7746 break 7747 } 7748 if o1.AuxInt != 16 { 7749 break 7750 } 7751 _ = o1.Args[1] 7752 o2 := o1.Args[0] 7753 if o2.Op != OpARM64ORshiftLL { 7754 break 7755 } 7756 if o2.AuxInt != 24 { 7757 break 7758 } 7759 _ = o2.Args[1] 7760 o3 := o2.Args[0] 7761 if o3.Op != OpARM64ORshiftLL { 7762 break 7763 } 7764 if o3.AuxInt != 32 { 7765 break 7766 } 7767 _ = o3.Args[1] 7768 o4 := o3.Args[0] 7769 if o4.Op != OpARM64ORshiftLL { 7770 break 7771 } 7772 if o4.AuxInt != 40 { 7773 break 7774 } 7775 _ = o4.Args[1] 7776 o5 := o4.Args[0] 7777 if o5.Op != OpARM64ORshiftLL { 7778 break 7779 } 7780 if o5.AuxInt != 48 { 7781 break 7782 } 7783 _ = o5.Args[1] 7784 s0 := o5.Args[0] 7785 if s0.Op != OpARM64SLLconst { 7786 break 7787 } 7788 if s0.AuxInt != 56 { 7789 break 7790 } 7791 y0 := s0.Args[0] 7792 if y0.Op != OpARM64MOVDnop { 7793 break 7794 } 7795 x0 := y0.Args[0] 7796 if x0.Op != OpARM64MOVBUload { 7797 break 7798 } 7799 i0 := x0.AuxInt 7800 s := x0.Aux 7801 _ = x0.Args[1] 7802 p := x0.Args[0] 7803 mem := x0.Args[1] 
7804 y1 := o5.Args[1] 7805 if y1.Op != OpARM64MOVDnop { 7806 break 7807 } 7808 x1 := y1.Args[0] 7809 if x1.Op != OpARM64MOVBUload { 7810 break 7811 } 7812 i1 := x1.AuxInt 7813 if x1.Aux != s { 7814 break 7815 } 7816 _ = x1.Args[1] 7817 if p != x1.Args[0] { 7818 break 7819 } 7820 if mem != x1.Args[1] { 7821 break 7822 } 7823 y2 := o4.Args[1] 7824 if y2.Op != OpARM64MOVDnop { 7825 break 7826 } 7827 x2 := y2.Args[0] 7828 if x2.Op != OpARM64MOVBUload { 7829 break 7830 } 7831 i2 := x2.AuxInt 7832 if x2.Aux != s { 7833 break 7834 } 7835 _ = x2.Args[1] 7836 if p != x2.Args[0] { 7837 break 7838 } 7839 if mem != x2.Args[1] { 7840 break 7841 } 7842 y3 := o3.Args[1] 7843 if y3.Op != OpARM64MOVDnop { 7844 break 7845 } 7846 x3 := y3.Args[0] 7847 if x3.Op != OpARM64MOVBUload { 7848 break 7849 } 7850 i3 := x3.AuxInt 7851 if x3.Aux != s { 7852 break 7853 } 7854 _ = x3.Args[1] 7855 if p != x3.Args[0] { 7856 break 7857 } 7858 if mem != x3.Args[1] { 7859 break 7860 } 7861 y4 := o2.Args[1] 7862 if y4.Op != OpARM64MOVDnop { 7863 break 7864 } 7865 x4 := y4.Args[0] 7866 if x4.Op != OpARM64MOVBUload { 7867 break 7868 } 7869 i4 := x4.AuxInt 7870 if x4.Aux != s { 7871 break 7872 } 7873 _ = x4.Args[1] 7874 if p != x4.Args[0] { 7875 break 7876 } 7877 if mem != x4.Args[1] { 7878 break 7879 } 7880 y5 := o1.Args[1] 7881 if y5.Op != OpARM64MOVDnop { 7882 break 7883 } 7884 x5 := y5.Args[0] 7885 if x5.Op != OpARM64MOVBUload { 7886 break 7887 } 7888 i5 := x5.AuxInt 7889 if x5.Aux != s { 7890 break 7891 } 7892 _ = x5.Args[1] 7893 if p != x5.Args[0] { 7894 break 7895 } 7896 if mem != x5.Args[1] { 7897 break 7898 } 7899 y6 := o0.Args[1] 7900 if y6.Op != OpARM64MOVDnop { 7901 break 7902 } 7903 x6 := y6.Args[0] 7904 if x6.Op != OpARM64MOVBUload { 7905 break 7906 } 7907 i6 := x6.AuxInt 7908 if x6.Aux != s { 7909 break 7910 } 7911 _ = x6.Args[1] 7912 if p != x6.Args[0] { 7913 break 7914 } 7915 if mem != x6.Args[1] { 7916 break 7917 } 7918 y7 := v.Args[1] 7919 if y7.Op != OpARM64MOVDnop { 7920 break 7921 } 
7922 x7 := y7.Args[0] 7923 if x7.Op != OpARM64MOVBUload { 7924 break 7925 } 7926 i7 := x7.AuxInt 7927 if x7.Aux != s { 7928 break 7929 } 7930 _ = x7.Args[1] 7931 if p != x7.Args[0] { 7932 break 7933 } 7934 if mem != x7.Args[1] { 7935 break 7936 } 7937 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7938 break 7939 } 7940 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7941 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7942 v.reset(OpCopy) 7943 v.AddArg(v0) 7944 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7945 v1.Aux = s 7946 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7947 v2.AuxInt = i0 7948 v2.AddArg(p) 7949 v1.AddArg(v2) 7950 v1.AddArg(mem) 7951 v0.AddArg(v1) 7952 return true 7953 } 7954 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop 
x6:(MOVBUload [i6] {s} p mem)))) 7955 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7956 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7957 for { 7958 t := v.Type 7959 _ = v.Args[1] 7960 y7 := v.Args[0] 7961 if y7.Op != OpARM64MOVDnop { 7962 break 7963 } 7964 x7 := y7.Args[0] 7965 if x7.Op != OpARM64MOVBUload { 7966 break 7967 } 7968 i7 := x7.AuxInt 7969 s := x7.Aux 7970 _ = x7.Args[1] 7971 p := x7.Args[0] 7972 mem := x7.Args[1] 7973 o0 := v.Args[1] 7974 if o0.Op != OpARM64ORshiftLL { 7975 break 7976 } 7977 if o0.AuxInt != 8 { 7978 break 7979 } 7980 _ = o0.Args[1] 7981 o1 := o0.Args[0] 7982 if o1.Op != OpARM64ORshiftLL { 7983 break 7984 } 7985 if o1.AuxInt != 16 { 7986 break 7987 } 7988 _ = o1.Args[1] 7989 o2 := o1.Args[0] 7990 if o2.Op != OpARM64ORshiftLL { 7991 break 7992 } 7993 if o2.AuxInt != 24 { 7994 break 7995 } 7996 _ = o2.Args[1] 7997 o3 := o2.Args[0] 7998 if o3.Op != OpARM64ORshiftLL { 7999 break 8000 } 8001 if o3.AuxInt != 32 { 8002 break 8003 } 8004 _ = o3.Args[1] 8005 o4 := o3.Args[0] 8006 if o4.Op != OpARM64ORshiftLL { 8007 break 8008 } 8009 if o4.AuxInt != 40 { 8010 break 8011 } 8012 _ 
= o4.Args[1] 8013 o5 := o4.Args[0] 8014 if o5.Op != OpARM64ORshiftLL { 8015 break 8016 } 8017 if o5.AuxInt != 48 { 8018 break 8019 } 8020 _ = o5.Args[1] 8021 s0 := o5.Args[0] 8022 if s0.Op != OpARM64SLLconst { 8023 break 8024 } 8025 if s0.AuxInt != 56 { 8026 break 8027 } 8028 y0 := s0.Args[0] 8029 if y0.Op != OpARM64MOVDnop { 8030 break 8031 } 8032 x0 := y0.Args[0] 8033 if x0.Op != OpARM64MOVBUload { 8034 break 8035 } 8036 i0 := x0.AuxInt 8037 if x0.Aux != s { 8038 break 8039 } 8040 _ = x0.Args[1] 8041 if p != x0.Args[0] { 8042 break 8043 } 8044 if mem != x0.Args[1] { 8045 break 8046 } 8047 y1 := o5.Args[1] 8048 if y1.Op != OpARM64MOVDnop { 8049 break 8050 } 8051 x1 := y1.Args[0] 8052 if x1.Op != OpARM64MOVBUload { 8053 break 8054 } 8055 i1 := x1.AuxInt 8056 if x1.Aux != s { 8057 break 8058 } 8059 _ = x1.Args[1] 8060 if p != x1.Args[0] { 8061 break 8062 } 8063 if mem != x1.Args[1] { 8064 break 8065 } 8066 y2 := o4.Args[1] 8067 if y2.Op != OpARM64MOVDnop { 8068 break 8069 } 8070 x2 := y2.Args[0] 8071 if x2.Op != OpARM64MOVBUload { 8072 break 8073 } 8074 i2 := x2.AuxInt 8075 if x2.Aux != s { 8076 break 8077 } 8078 _ = x2.Args[1] 8079 if p != x2.Args[0] { 8080 break 8081 } 8082 if mem != x2.Args[1] { 8083 break 8084 } 8085 y3 := o3.Args[1] 8086 if y3.Op != OpARM64MOVDnop { 8087 break 8088 } 8089 x3 := y3.Args[0] 8090 if x3.Op != OpARM64MOVBUload { 8091 break 8092 } 8093 i3 := x3.AuxInt 8094 if x3.Aux != s { 8095 break 8096 } 8097 _ = x3.Args[1] 8098 if p != x3.Args[0] { 8099 break 8100 } 8101 if mem != x3.Args[1] { 8102 break 8103 } 8104 y4 := o2.Args[1] 8105 if y4.Op != OpARM64MOVDnop { 8106 break 8107 } 8108 x4 := y4.Args[0] 8109 if x4.Op != OpARM64MOVBUload { 8110 break 8111 } 8112 i4 := x4.AuxInt 8113 if x4.Aux != s { 8114 break 8115 } 8116 _ = x4.Args[1] 8117 if p != x4.Args[0] { 8118 break 8119 } 8120 if mem != x4.Args[1] { 8121 break 8122 } 8123 y5 := o1.Args[1] 8124 if y5.Op != OpARM64MOVDnop { 8125 break 8126 } 8127 x5 := y5.Args[0] 8128 if x5.Op != 
OpARM64MOVBUload { 8129 break 8130 } 8131 i5 := x5.AuxInt 8132 if x5.Aux != s { 8133 break 8134 } 8135 _ = x5.Args[1] 8136 if p != x5.Args[0] { 8137 break 8138 } 8139 if mem != x5.Args[1] { 8140 break 8141 } 8142 y6 := o0.Args[1] 8143 if y6.Op != OpARM64MOVDnop { 8144 break 8145 } 8146 x6 := y6.Args[0] 8147 if x6.Op != OpARM64MOVBUload { 8148 break 8149 } 8150 i6 := x6.AuxInt 8151 if x6.Aux != s { 8152 break 8153 } 8154 _ = x6.Args[1] 8155 if p != x6.Args[0] { 8156 break 8157 } 8158 if mem != x6.Args[1] { 8159 break 8160 } 8161 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 8162 break 8163 } 8164 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 8165 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 8166 v.reset(OpCopy) 8167 v.AddArg(v0) 8168 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8169 v1.Aux = s 8170 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8171 v2.AuxInt = i0 8172 v2.AddArg(p) 8173 v1.AddArg(v2) 8174 v1.AddArg(mem) 8175 v0.AddArg(v1) 8176 return true 8177 } 8178 return false 8179 } 8180 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool { 8181 // match: (ORconst [0] x) 8182 // cond: 8183 // result: x 8184 for { 8185 if v.AuxInt != 0 { 8186 break 8187 } 
8188 x := v.Args[0] 8189 v.reset(OpCopy) 8190 v.Type = x.Type 8191 v.AddArg(x) 8192 return true 8193 } 8194 // match: (ORconst [-1] _) 8195 // cond: 8196 // result: (MOVDconst [-1]) 8197 for { 8198 if v.AuxInt != -1 { 8199 break 8200 } 8201 v.reset(OpARM64MOVDconst) 8202 v.AuxInt = -1 8203 return true 8204 } 8205 // match: (ORconst [c] (MOVDconst [d])) 8206 // cond: 8207 // result: (MOVDconst [c|d]) 8208 for { 8209 c := v.AuxInt 8210 v_0 := v.Args[0] 8211 if v_0.Op != OpARM64MOVDconst { 8212 break 8213 } 8214 d := v_0.AuxInt 8215 v.reset(OpARM64MOVDconst) 8216 v.AuxInt = c | d 8217 return true 8218 } 8219 // match: (ORconst [c] (ORconst [d] x)) 8220 // cond: 8221 // result: (ORconst [c|d] x) 8222 for { 8223 c := v.AuxInt 8224 v_0 := v.Args[0] 8225 if v_0.Op != OpARM64ORconst { 8226 break 8227 } 8228 d := v_0.AuxInt 8229 x := v_0.Args[0] 8230 v.reset(OpARM64ORconst) 8231 v.AuxInt = c | d 8232 v.AddArg(x) 8233 return true 8234 } 8235 return false 8236 } 8237 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool { 8238 b := v.Block 8239 _ = b 8240 // match: (ORshiftLL (MOVDconst [c]) x [d]) 8241 // cond: 8242 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 8243 for { 8244 d := v.AuxInt 8245 _ = v.Args[1] 8246 v_0 := v.Args[0] 8247 if v_0.Op != OpARM64MOVDconst { 8248 break 8249 } 8250 c := v_0.AuxInt 8251 x := v.Args[1] 8252 v.reset(OpARM64ORconst) 8253 v.AuxInt = c 8254 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8255 v0.AuxInt = d 8256 v0.AddArg(x) 8257 v.AddArg(v0) 8258 return true 8259 } 8260 // match: (ORshiftLL x (MOVDconst [c]) [d]) 8261 // cond: 8262 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 8263 for { 8264 d := v.AuxInt 8265 _ = v.Args[1] 8266 x := v.Args[0] 8267 v_1 := v.Args[1] 8268 if v_1.Op != OpARM64MOVDconst { 8269 break 8270 } 8271 c := v_1.AuxInt 8272 v.reset(OpARM64ORconst) 8273 v.AuxInt = int64(uint64(c) << uint64(d)) 8274 v.AddArg(x) 8275 return true 8276 } 8277 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 8278 // cond: 
c==d 8279 // result: y 8280 for { 8281 d := v.AuxInt 8282 _ = v.Args[1] 8283 x := v.Args[0] 8284 y := v.Args[1] 8285 if y.Op != OpARM64SLLconst { 8286 break 8287 } 8288 c := y.AuxInt 8289 if x != y.Args[0] { 8290 break 8291 } 8292 if !(c == d) { 8293 break 8294 } 8295 v.reset(OpCopy) 8296 v.Type = y.Type 8297 v.AddArg(y) 8298 return true 8299 } 8300 // match: (ORshiftLL [c] (SRLconst x [64-c]) x) 8301 // cond: 8302 // result: (RORconst [64-c] x) 8303 for { 8304 c := v.AuxInt 8305 _ = v.Args[1] 8306 v_0 := v.Args[0] 8307 if v_0.Op != OpARM64SRLconst { 8308 break 8309 } 8310 if v_0.AuxInt != 64-c { 8311 break 8312 } 8313 x := v_0.Args[0] 8314 if x != v.Args[1] { 8315 break 8316 } 8317 v.reset(OpARM64RORconst) 8318 v.AuxInt = 64 - c 8319 v.AddArg(x) 8320 return true 8321 } 8322 // match: (ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 8323 // cond: c < 32 && t.Size() == 4 8324 // result: (RORWconst [32-c] x) 8325 for { 8326 t := v.Type 8327 c := v.AuxInt 8328 _ = v.Args[1] 8329 v_0 := v.Args[0] 8330 if v_0.Op != OpARM64SRLconst { 8331 break 8332 } 8333 if v_0.AuxInt != 32-c { 8334 break 8335 } 8336 v_0_0 := v_0.Args[0] 8337 if v_0_0.Op != OpARM64MOVWUreg { 8338 break 8339 } 8340 x := v_0_0.Args[0] 8341 if x != v.Args[1] { 8342 break 8343 } 8344 if !(c < 32 && t.Size() == 4) { 8345 break 8346 } 8347 v.reset(OpARM64RORWconst) 8348 v.AuxInt = 32 - c 8349 v.AddArg(x) 8350 return true 8351 } 8352 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) 8353 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 8354 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8355 for { 8356 t := v.Type 8357 if v.AuxInt != 8 { 8358 break 8359 } 8360 _ = v.Args[1] 8361 y0 := v.Args[0] 8362 if y0.Op != OpARM64MOVDnop { 8363 break 8364 } 8365 x0 := y0.Args[0] 8366 if x0.Op != 
OpARM64MOVBUload { 8367 break 8368 } 8369 i0 := x0.AuxInt 8370 s := x0.Aux 8371 _ = x0.Args[1] 8372 p := x0.Args[0] 8373 mem := x0.Args[1] 8374 y1 := v.Args[1] 8375 if y1.Op != OpARM64MOVDnop { 8376 break 8377 } 8378 x1 := y1.Args[0] 8379 if x1.Op != OpARM64MOVBUload { 8380 break 8381 } 8382 i1 := x1.AuxInt 8383 if x1.Aux != s { 8384 break 8385 } 8386 _ = x1.Args[1] 8387 if p != x1.Args[0] { 8388 break 8389 } 8390 if mem != x1.Args[1] { 8391 break 8392 } 8393 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 8394 break 8395 } 8396 b = mergePoint(b, x0, x1) 8397 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 8398 v.reset(OpCopy) 8399 v.AddArg(v0) 8400 v0.Aux = s 8401 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8402 v1.AuxInt = i0 8403 v1.AddArg(p) 8404 v0.AddArg(v1) 8405 v0.AddArg(mem) 8406 return true 8407 } 8408 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 8409 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 8410 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8411 for { 8412 t := v.Type 8413 if v.AuxInt != 24 { 8414 break 8415 } 8416 _ = v.Args[1] 8417 o0 := v.Args[0] 8418 if o0.Op != OpARM64ORshiftLL { 8419 break 8420 } 8421 if o0.AuxInt != 16 { 8422 break 8423 } 8424 _ = o0.Args[1] 8425 x0 := o0.Args[0] 8426 if x0.Op != OpARM64MOVHUload { 8427 break 8428 } 8429 i0 := x0.AuxInt 8430 s := x0.Aux 8431 _ = x0.Args[1] 8432 p := x0.Args[0] 8433 mem := x0.Args[1] 8434 y1 := o0.Args[1] 8435 if y1.Op != OpARM64MOVDnop { 8436 break 8437 } 8438 x1 := y1.Args[0] 8439 if x1.Op != OpARM64MOVBUload 
{ 8440 break 8441 } 8442 i2 := x1.AuxInt 8443 if x1.Aux != s { 8444 break 8445 } 8446 _ = x1.Args[1] 8447 if p != x1.Args[0] { 8448 break 8449 } 8450 if mem != x1.Args[1] { 8451 break 8452 } 8453 y2 := v.Args[1] 8454 if y2.Op != OpARM64MOVDnop { 8455 break 8456 } 8457 x2 := y2.Args[0] 8458 if x2.Op != OpARM64MOVBUload { 8459 break 8460 } 8461 i3 := x2.AuxInt 8462 if x2.Aux != s { 8463 break 8464 } 8465 _ = x2.Args[1] 8466 if p != x2.Args[0] { 8467 break 8468 } 8469 if mem != x2.Args[1] { 8470 break 8471 } 8472 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 8473 break 8474 } 8475 b = mergePoint(b, x0, x1, x2) 8476 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8477 v.reset(OpCopy) 8478 v.AddArg(v0) 8479 v0.Aux = s 8480 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8481 v1.AuxInt = i0 8482 v1.AddArg(p) 8483 v0.AddArg(v1) 8484 v0.AddArg(mem) 8485 return true 8486 } 8487 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 8488 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 8489 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8490 for { 8491 t := v.Type 8492 if v.AuxInt != 56 
{ 8493 break 8494 } 8495 _ = v.Args[1] 8496 o0 := v.Args[0] 8497 if o0.Op != OpARM64ORshiftLL { 8498 break 8499 } 8500 if o0.AuxInt != 48 { 8501 break 8502 } 8503 _ = o0.Args[1] 8504 o1 := o0.Args[0] 8505 if o1.Op != OpARM64ORshiftLL { 8506 break 8507 } 8508 if o1.AuxInt != 40 { 8509 break 8510 } 8511 _ = o1.Args[1] 8512 o2 := o1.Args[0] 8513 if o2.Op != OpARM64ORshiftLL { 8514 break 8515 } 8516 if o2.AuxInt != 32 { 8517 break 8518 } 8519 _ = o2.Args[1] 8520 x0 := o2.Args[0] 8521 if x0.Op != OpARM64MOVWUload { 8522 break 8523 } 8524 i0 := x0.AuxInt 8525 s := x0.Aux 8526 _ = x0.Args[1] 8527 p := x0.Args[0] 8528 mem := x0.Args[1] 8529 y1 := o2.Args[1] 8530 if y1.Op != OpARM64MOVDnop { 8531 break 8532 } 8533 x1 := y1.Args[0] 8534 if x1.Op != OpARM64MOVBUload { 8535 break 8536 } 8537 i4 := x1.AuxInt 8538 if x1.Aux != s { 8539 break 8540 } 8541 _ = x1.Args[1] 8542 if p != x1.Args[0] { 8543 break 8544 } 8545 if mem != x1.Args[1] { 8546 break 8547 } 8548 y2 := o1.Args[1] 8549 if y2.Op != OpARM64MOVDnop { 8550 break 8551 } 8552 x2 := y2.Args[0] 8553 if x2.Op != OpARM64MOVBUload { 8554 break 8555 } 8556 i5 := x2.AuxInt 8557 if x2.Aux != s { 8558 break 8559 } 8560 _ = x2.Args[1] 8561 if p != x2.Args[0] { 8562 break 8563 } 8564 if mem != x2.Args[1] { 8565 break 8566 } 8567 y3 := o0.Args[1] 8568 if y3.Op != OpARM64MOVDnop { 8569 break 8570 } 8571 x3 := y3.Args[0] 8572 if x3.Op != OpARM64MOVBUload { 8573 break 8574 } 8575 i6 := x3.AuxInt 8576 if x3.Aux != s { 8577 break 8578 } 8579 _ = x3.Args[1] 8580 if p != x3.Args[0] { 8581 break 8582 } 8583 if mem != x3.Args[1] { 8584 break 8585 } 8586 y4 := v.Args[1] 8587 if y4.Op != OpARM64MOVDnop { 8588 break 8589 } 8590 x4 := y4.Args[0] 8591 if x4.Op != OpARM64MOVBUload { 8592 break 8593 } 8594 i7 := x4.AuxInt 8595 if x4.Aux != s { 8596 break 8597 } 8598 _ = x4.Args[1] 8599 if p != x4.Args[0] { 8600 break 8601 } 8602 if mem != x4.Args[1] { 8603 break 8604 } 8605 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 
&& x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 8606 break 8607 } 8608 b = mergePoint(b, x0, x1, x2, x3, x4) 8609 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8610 v.reset(OpCopy) 8611 v.AddArg(v0) 8612 v0.Aux = s 8613 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8614 v1.AuxInt = i0 8615 v1.AddArg(p) 8616 v0.AddArg(v1) 8617 v0.AddArg(mem) 8618 return true 8619 } 8620 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 8621 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 8622 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 8623 for { 8624 t := v.Type 8625 if v.AuxInt != 8 { 8626 break 8627 } 8628 _ = v.Args[1] 8629 y0 := v.Args[0] 8630 if y0.Op != OpARM64MOVDnop { 8631 break 8632 } 8633 x0 := y0.Args[0] 8634 if x0.Op != OpARM64MOVBUload { 8635 break 8636 } 8637 i1 := x0.AuxInt 8638 s := x0.Aux 8639 _ = x0.Args[1] 8640 p := x0.Args[0] 8641 mem := x0.Args[1] 8642 y1 := v.Args[1] 8643 if y1.Op != OpARM64MOVDnop { 8644 break 8645 } 8646 x1 := y1.Args[0] 8647 if x1.Op != OpARM64MOVBUload { 8648 break 8649 } 8650 i0 := x1.AuxInt 8651 if x1.Aux != s { 8652 break 8653 } 8654 _ = x1.Args[1] 8655 if p != x1.Args[0] { 8656 break 8657 } 8658 if mem != x1.Args[1] { 8659 break 8660 } 8661 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 8662 break 8663 } 8664 b = mergePoint(b, x0, x1) 8665 v0 := 
b.NewValue0(v.Pos, OpARM64REV16W, t) 8666 v.reset(OpCopy) 8667 v.AddArg(v0) 8668 v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 8669 v1.AuxInt = i0 8670 v1.Aux = s 8671 v1.AddArg(p) 8672 v1.AddArg(mem) 8673 v0.AddArg(v1) 8674 return true 8675 } 8676 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 8677 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 8678 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 8679 for { 8680 t := v.Type 8681 if v.AuxInt != 24 { 8682 break 8683 } 8684 _ = v.Args[1] 8685 o0 := v.Args[0] 8686 if o0.Op != OpARM64ORshiftLL { 8687 break 8688 } 8689 if o0.AuxInt != 16 { 8690 break 8691 } 8692 _ = o0.Args[1] 8693 y0 := o0.Args[0] 8694 if y0.Op != OpARM64REV16W { 8695 break 8696 } 8697 x0 := y0.Args[0] 8698 if x0.Op != OpARM64MOVHUload { 8699 break 8700 } 8701 i2 := x0.AuxInt 8702 s := x0.Aux 8703 _ = x0.Args[1] 8704 p := x0.Args[0] 8705 mem := x0.Args[1] 8706 y1 := o0.Args[1] 8707 if y1.Op != OpARM64MOVDnop { 8708 break 8709 } 8710 x1 := y1.Args[0] 8711 if x1.Op != OpARM64MOVBUload { 8712 break 8713 } 8714 i1 := x1.AuxInt 8715 if x1.Aux != s { 8716 break 8717 } 8718 _ = x1.Args[1] 8719 if p != x1.Args[0] { 8720 break 8721 } 8722 if mem != x1.Args[1] { 8723 break 8724 } 8725 y2 := v.Args[1] 8726 if y2.Op != OpARM64MOVDnop { 8727 break 8728 } 8729 x2 := y2.Args[0] 8730 if x2.Op != OpARM64MOVBUload { 8731 break 8732 } 8733 i0 := x2.AuxInt 8734 if x2.Aux != s { 8735 break 8736 } 8737 _ = x2.Args[1] 8738 if p != x2.Args[0] { 8739 break 8740 } 8741 if mem != x2.Args[1] { 8742 break 8743 } 8744 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses 
== 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 8745 break 8746 } 8747 b = mergePoint(b, x0, x1, x2) 8748 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 8749 v.reset(OpCopy) 8750 v.AddArg(v0) 8751 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8752 v1.Aux = s 8753 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8754 v2.AuxInt = i0 8755 v2.AddArg(p) 8756 v1.AddArg(v2) 8757 v1.AddArg(mem) 8758 v0.AddArg(v1) 8759 return true 8760 } 8761 return false 8762 } 8763 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool { 8764 b := v.Block 8765 _ = b 8766 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 8767 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 8768 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 8769 for { 8770 t := v.Type 8771 if v.AuxInt != 56 { 8772 break 8773 } 8774 _ = v.Args[1] 8775 o0 := v.Args[0] 8776 if o0.Op != OpARM64ORshiftLL { 8777 break 8778 } 8779 if o0.AuxInt != 48 { 8780 break 8781 } 8782 _ = o0.Args[1] 8783 o1 := o0.Args[0] 8784 if o1.Op != OpARM64ORshiftLL { 8785 break 8786 } 8787 if o1.AuxInt != 40 { 8788 break 8789 } 8790 _ = 
o1.Args[1] 8791 o2 := o1.Args[0] 8792 if o2.Op != OpARM64ORshiftLL { 8793 break 8794 } 8795 if o2.AuxInt != 32 { 8796 break 8797 } 8798 _ = o2.Args[1] 8799 y0 := o2.Args[0] 8800 if y0.Op != OpARM64REVW { 8801 break 8802 } 8803 x0 := y0.Args[0] 8804 if x0.Op != OpARM64MOVWUload { 8805 break 8806 } 8807 i4 := x0.AuxInt 8808 s := x0.Aux 8809 _ = x0.Args[1] 8810 p := x0.Args[0] 8811 mem := x0.Args[1] 8812 y1 := o2.Args[1] 8813 if y1.Op != OpARM64MOVDnop { 8814 break 8815 } 8816 x1 := y1.Args[0] 8817 if x1.Op != OpARM64MOVBUload { 8818 break 8819 } 8820 i3 := x1.AuxInt 8821 if x1.Aux != s { 8822 break 8823 } 8824 _ = x1.Args[1] 8825 if p != x1.Args[0] { 8826 break 8827 } 8828 if mem != x1.Args[1] { 8829 break 8830 } 8831 y2 := o1.Args[1] 8832 if y2.Op != OpARM64MOVDnop { 8833 break 8834 } 8835 x2 := y2.Args[0] 8836 if x2.Op != OpARM64MOVBUload { 8837 break 8838 } 8839 i2 := x2.AuxInt 8840 if x2.Aux != s { 8841 break 8842 } 8843 _ = x2.Args[1] 8844 if p != x2.Args[0] { 8845 break 8846 } 8847 if mem != x2.Args[1] { 8848 break 8849 } 8850 y3 := o0.Args[1] 8851 if y3.Op != OpARM64MOVDnop { 8852 break 8853 } 8854 x3 := y3.Args[0] 8855 if x3.Op != OpARM64MOVBUload { 8856 break 8857 } 8858 i1 := x3.AuxInt 8859 if x3.Aux != s { 8860 break 8861 } 8862 _ = x3.Args[1] 8863 if p != x3.Args[0] { 8864 break 8865 } 8866 if mem != x3.Args[1] { 8867 break 8868 } 8869 y4 := v.Args[1] 8870 if y4.Op != OpARM64MOVDnop { 8871 break 8872 } 8873 x4 := y4.Args[0] 8874 if x4.Op != OpARM64MOVBUload { 8875 break 8876 } 8877 i0 := x4.AuxInt 8878 if x4.Aux != s { 8879 break 8880 } 8881 _ = x4.Args[1] 8882 if p != x4.Args[0] { 8883 break 8884 } 8885 if mem != x4.Args[1] { 8886 break 8887 } 8888 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil 
&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 8889 break 8890 } 8891 b = mergePoint(b, x0, x1, x2, x3, x4) 8892 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 8893 v.reset(OpCopy) 8894 v.AddArg(v0) 8895 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8896 v1.Aux = s 8897 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8898 v2.AuxInt = i0 8899 v2.AddArg(p) 8900 v1.AddArg(v2) 8901 v1.AddArg(mem) 8902 v0.AddArg(v1) 8903 return true 8904 } 8905 return false 8906 } 8907 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 8908 b := v.Block 8909 _ = b 8910 // match: (ORshiftRA (MOVDconst [c]) x [d]) 8911 // cond: 8912 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 8913 for { 8914 d := v.AuxInt 8915 _ = v.Args[1] 8916 v_0 := v.Args[0] 8917 if v_0.Op != OpARM64MOVDconst { 8918 break 8919 } 8920 c := v_0.AuxInt 8921 x := v.Args[1] 8922 v.reset(OpARM64ORconst) 8923 v.AuxInt = c 8924 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 8925 v0.AuxInt = d 8926 v0.AddArg(x) 8927 v.AddArg(v0) 8928 return true 8929 } 8930 // match: (ORshiftRA x (MOVDconst [c]) [d]) 8931 // cond: 8932 // result: (ORconst x [int64(int64(c)>>uint64(d))]) 8933 for { 8934 d := v.AuxInt 8935 _ = v.Args[1] 8936 x := v.Args[0] 8937 v_1 := v.Args[1] 8938 if v_1.Op != OpARM64MOVDconst { 8939 break 8940 } 8941 c := v_1.AuxInt 8942 v.reset(OpARM64ORconst) 8943 v.AuxInt = int64(int64(c) >> uint64(d)) 8944 v.AddArg(x) 8945 return true 8946 } 8947 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 8948 // cond: c==d 8949 // result: y 8950 for { 8951 d := v.AuxInt 8952 _ = v.Args[1] 8953 x := v.Args[0] 8954 y := v.Args[1] 8955 if y.Op != OpARM64SRAconst { 8956 break 8957 } 8958 c := y.AuxInt 8959 if x != y.Args[0] { 8960 break 8961 } 8962 if !(c == d) { 8963 break 8964 } 8965 v.reset(OpCopy) 8966 v.Type = y.Type 8967 v.AddArg(y) 8968 return true 8969 } 8970 return false 8971 
} 8972 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 8973 b := v.Block 8974 _ = b 8975 // match: (ORshiftRL (MOVDconst [c]) x [d]) 8976 // cond: 8977 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 8978 for { 8979 d := v.AuxInt 8980 _ = v.Args[1] 8981 v_0 := v.Args[0] 8982 if v_0.Op != OpARM64MOVDconst { 8983 break 8984 } 8985 c := v_0.AuxInt 8986 x := v.Args[1] 8987 v.reset(OpARM64ORconst) 8988 v.AuxInt = c 8989 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 8990 v0.AuxInt = d 8991 v0.AddArg(x) 8992 v.AddArg(v0) 8993 return true 8994 } 8995 // match: (ORshiftRL x (MOVDconst [c]) [d]) 8996 // cond: 8997 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 8998 for { 8999 d := v.AuxInt 9000 _ = v.Args[1] 9001 x := v.Args[0] 9002 v_1 := v.Args[1] 9003 if v_1.Op != OpARM64MOVDconst { 9004 break 9005 } 9006 c := v_1.AuxInt 9007 v.reset(OpARM64ORconst) 9008 v.AuxInt = int64(uint64(c) >> uint64(d)) 9009 v.AddArg(x) 9010 return true 9011 } 9012 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 9013 // cond: c==d 9014 // result: y 9015 for { 9016 d := v.AuxInt 9017 _ = v.Args[1] 9018 x := v.Args[0] 9019 y := v.Args[1] 9020 if y.Op != OpARM64SRLconst { 9021 break 9022 } 9023 c := y.AuxInt 9024 if x != y.Args[0] { 9025 break 9026 } 9027 if !(c == d) { 9028 break 9029 } 9030 v.reset(OpCopy) 9031 v.Type = y.Type 9032 v.AddArg(y) 9033 return true 9034 } 9035 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 9036 // cond: 9037 // result: (RORconst [ c] x) 9038 for { 9039 c := v.AuxInt 9040 _ = v.Args[1] 9041 v_0 := v.Args[0] 9042 if v_0.Op != OpARM64SLLconst { 9043 break 9044 } 9045 if v_0.AuxInt != 64-c { 9046 break 9047 } 9048 x := v_0.Args[0] 9049 if x != v.Args[1] { 9050 break 9051 } 9052 v.reset(OpARM64RORconst) 9053 v.AuxInt = c 9054 v.AddArg(x) 9055 return true 9056 } 9057 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 9058 // cond: c < 32 && t.Size() == 4 9059 // result: (RORWconst [ c] x) 9060 for { 9061 t := v.Type 9062 c := v.AuxInt 9063 
_ = v.Args[1] 9064 v_0 := v.Args[0] 9065 if v_0.Op != OpARM64SLLconst { 9066 break 9067 } 9068 if v_0.AuxInt != 32-c { 9069 break 9070 } 9071 x := v_0.Args[0] 9072 v_1 := v.Args[1] 9073 if v_1.Op != OpARM64MOVWUreg { 9074 break 9075 } 9076 if x != v_1.Args[0] { 9077 break 9078 } 9079 if !(c < 32 && t.Size() == 4) { 9080 break 9081 } 9082 v.reset(OpARM64RORWconst) 9083 v.AuxInt = c 9084 v.AddArg(x) 9085 return true 9086 } 9087 return false 9088 } 9089 func rewriteValueARM64_OpARM64SLL_0(v *Value) bool { 9090 // match: (SLL x (MOVDconst [c])) 9091 // cond: 9092 // result: (SLLconst x [c&63]) 9093 for { 9094 _ = v.Args[1] 9095 x := v.Args[0] 9096 v_1 := v.Args[1] 9097 if v_1.Op != OpARM64MOVDconst { 9098 break 9099 } 9100 c := v_1.AuxInt 9101 v.reset(OpARM64SLLconst) 9102 v.AuxInt = c & 63 9103 v.AddArg(x) 9104 return true 9105 } 9106 return false 9107 } 9108 func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool { 9109 // match: (SLLconst [c] (MOVDconst [d])) 9110 // cond: 9111 // result: (MOVDconst [int64(d)<<uint64(c)]) 9112 for { 9113 c := v.AuxInt 9114 v_0 := v.Args[0] 9115 if v_0.Op != OpARM64MOVDconst { 9116 break 9117 } 9118 d := v_0.AuxInt 9119 v.reset(OpARM64MOVDconst) 9120 v.AuxInt = int64(d) << uint64(c) 9121 return true 9122 } 9123 // match: (SLLconst [c] (SRLconst [c] x)) 9124 // cond: 0 < c && c < 64 9125 // result: (ANDconst [^(1<<uint(c)-1)] x) 9126 for { 9127 c := v.AuxInt 9128 v_0 := v.Args[0] 9129 if v_0.Op != OpARM64SRLconst { 9130 break 9131 } 9132 if v_0.AuxInt != c { 9133 break 9134 } 9135 x := v_0.Args[0] 9136 if !(0 < c && c < 64) { 9137 break 9138 } 9139 v.reset(OpARM64ANDconst) 9140 v.AuxInt = ^(1<<uint(c) - 1) 9141 v.AddArg(x) 9142 return true 9143 } 9144 return false 9145 } 9146 func rewriteValueARM64_OpARM64SRA_0(v *Value) bool { 9147 // match: (SRA x (MOVDconst [c])) 9148 // cond: 9149 // result: (SRAconst x [c&63]) 9150 for { 9151 _ = v.Args[1] 9152 x := v.Args[0] 9153 v_1 := v.Args[1] 9154 if v_1.Op != OpARM64MOVDconst { 9155 break 
9156 } 9157 c := v_1.AuxInt 9158 v.reset(OpARM64SRAconst) 9159 v.AuxInt = c & 63 9160 v.AddArg(x) 9161 return true 9162 } 9163 return false 9164 } 9165 func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool { 9166 // match: (SRAconst [c] (MOVDconst [d])) 9167 // cond: 9168 // result: (MOVDconst [int64(d)>>uint64(c)]) 9169 for { 9170 c := v.AuxInt 9171 v_0 := v.Args[0] 9172 if v_0.Op != OpARM64MOVDconst { 9173 break 9174 } 9175 d := v_0.AuxInt 9176 v.reset(OpARM64MOVDconst) 9177 v.AuxInt = int64(d) >> uint64(c) 9178 return true 9179 } 9180 return false 9181 } 9182 func rewriteValueARM64_OpARM64SRL_0(v *Value) bool { 9183 // match: (SRL x (MOVDconst [c])) 9184 // cond: 9185 // result: (SRLconst x [c&63]) 9186 for { 9187 _ = v.Args[1] 9188 x := v.Args[0] 9189 v_1 := v.Args[1] 9190 if v_1.Op != OpARM64MOVDconst { 9191 break 9192 } 9193 c := v_1.AuxInt 9194 v.reset(OpARM64SRLconst) 9195 v.AuxInt = c & 63 9196 v.AddArg(x) 9197 return true 9198 } 9199 return false 9200 } 9201 func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool { 9202 // match: (SRLconst [c] (MOVDconst [d])) 9203 // cond: 9204 // result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 9205 for { 9206 c := v.AuxInt 9207 v_0 := v.Args[0] 9208 if v_0.Op != OpARM64MOVDconst { 9209 break 9210 } 9211 d := v_0.AuxInt 9212 v.reset(OpARM64MOVDconst) 9213 v.AuxInt = int64(uint64(d) >> uint64(c)) 9214 return true 9215 } 9216 // match: (SRLconst [c] (SLLconst [c] x)) 9217 // cond: 0 < c && c < 64 9218 // result: (ANDconst [1<<uint(64-c)-1] x) 9219 for { 9220 c := v.AuxInt 9221 v_0 := v.Args[0] 9222 if v_0.Op != OpARM64SLLconst { 9223 break 9224 } 9225 if v_0.AuxInt != c { 9226 break 9227 } 9228 x := v_0.Args[0] 9229 if !(0 < c && c < 64) { 9230 break 9231 } 9232 v.reset(OpARM64ANDconst) 9233 v.AuxInt = 1<<uint(64-c) - 1 9234 v.AddArg(x) 9235 return true 9236 } 9237 return false 9238 } 9239 func rewriteValueARM64_OpARM64STP_0(v *Value) bool { 9240 b := v.Block 9241 _ = b 9242 config := b.Func.Config 9243 _ = config 9244 
// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem) 9245 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9246 // result: (STP [off1+off2] {sym} ptr val1 val2 mem) 9247 for { 9248 off1 := v.AuxInt 9249 sym := v.Aux 9250 _ = v.Args[3] 9251 v_0 := v.Args[0] 9252 if v_0.Op != OpARM64ADDconst { 9253 break 9254 } 9255 off2 := v_0.AuxInt 9256 ptr := v_0.Args[0] 9257 val1 := v.Args[1] 9258 val2 := v.Args[2] 9259 mem := v.Args[3] 9260 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9261 break 9262 } 9263 v.reset(OpARM64STP) 9264 v.AuxInt = off1 + off2 9265 v.Aux = sym 9266 v.AddArg(ptr) 9267 v.AddArg(val1) 9268 v.AddArg(val2) 9269 v.AddArg(mem) 9270 return true 9271 } 9272 // match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem) 9273 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9274 // result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem) 9275 for { 9276 off1 := v.AuxInt 9277 sym1 := v.Aux 9278 _ = v.Args[3] 9279 v_0 := v.Args[0] 9280 if v_0.Op != OpARM64MOVDaddr { 9281 break 9282 } 9283 off2 := v_0.AuxInt 9284 sym2 := v_0.Aux 9285 ptr := v_0.Args[0] 9286 val1 := v.Args[1] 9287 val2 := v.Args[2] 9288 mem := v.Args[3] 9289 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9290 break 9291 } 9292 v.reset(OpARM64STP) 9293 v.AuxInt = off1 + off2 9294 v.Aux = mergeSym(sym1, sym2) 9295 v.AddArg(ptr) 9296 v.AddArg(val1) 9297 v.AddArg(val2) 9298 v.AddArg(mem) 9299 return true 9300 } 9301 // match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem) 9302 // cond: 9303 // result: (MOVQstorezero [off] {sym} ptr mem) 9304 for { 9305 off := v.AuxInt 9306 sym := v.Aux 9307 _ = v.Args[3] 9308 ptr := v.Args[0] 9309 v_1 := v.Args[1] 9310 if v_1.Op != OpARM64MOVDconst { 9311 break 9312 } 9313 if v_1.AuxInt != 0 { 9314 break 9315 } 9316 v_2 := v.Args[2] 9317 if v_2.Op != 
OpARM64MOVDconst { 9318 break 9319 } 9320 if v_2.AuxInt != 0 { 9321 break 9322 } 9323 mem := v.Args[3] 9324 v.reset(OpARM64MOVQstorezero) 9325 v.AuxInt = off 9326 v.Aux = sym 9327 v.AddArg(ptr) 9328 v.AddArg(mem) 9329 return true 9330 } 9331 return false 9332 } 9333 func rewriteValueARM64_OpARM64SUB_0(v *Value) bool { 9334 b := v.Block 9335 _ = b 9336 // match: (SUB x (MOVDconst [c])) 9337 // cond: 9338 // result: (SUBconst [c] x) 9339 for { 9340 _ = v.Args[1] 9341 x := v.Args[0] 9342 v_1 := v.Args[1] 9343 if v_1.Op != OpARM64MOVDconst { 9344 break 9345 } 9346 c := v_1.AuxInt 9347 v.reset(OpARM64SUBconst) 9348 v.AuxInt = c 9349 v.AddArg(x) 9350 return true 9351 } 9352 // match: (SUB x x) 9353 // cond: 9354 // result: (MOVDconst [0]) 9355 for { 9356 _ = v.Args[1] 9357 x := v.Args[0] 9358 if x != v.Args[1] { 9359 break 9360 } 9361 v.reset(OpARM64MOVDconst) 9362 v.AuxInt = 0 9363 return true 9364 } 9365 // match: (SUB x (SUB y z)) 9366 // cond: 9367 // result: (SUB (ADD <v.Type> x z) y) 9368 for { 9369 _ = v.Args[1] 9370 x := v.Args[0] 9371 v_1 := v.Args[1] 9372 if v_1.Op != OpARM64SUB { 9373 break 9374 } 9375 _ = v_1.Args[1] 9376 y := v_1.Args[0] 9377 z := v_1.Args[1] 9378 v.reset(OpARM64SUB) 9379 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type) 9380 v0.AddArg(x) 9381 v0.AddArg(z) 9382 v.AddArg(v0) 9383 v.AddArg(y) 9384 return true 9385 } 9386 // match: (SUB (SUB x y) z) 9387 // cond: 9388 // result: (SUB x (ADD <y.Type> y z)) 9389 for { 9390 _ = v.Args[1] 9391 v_0 := v.Args[0] 9392 if v_0.Op != OpARM64SUB { 9393 break 9394 } 9395 _ = v_0.Args[1] 9396 x := v_0.Args[0] 9397 y := v_0.Args[1] 9398 z := v.Args[1] 9399 v.reset(OpARM64SUB) 9400 v.AddArg(x) 9401 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type) 9402 v0.AddArg(y) 9403 v0.AddArg(z) 9404 v.AddArg(v0) 9405 return true 9406 } 9407 // match: (SUB x (SLLconst [c] y)) 9408 // cond: 9409 // result: (SUBshiftLL x y [c]) 9410 for { 9411 _ = v.Args[1] 9412 x := v.Args[0] 9413 v_1 := v.Args[1] 9414 if v_1.Op != OpARM64SLLconst { 
9415 break 9416 } 9417 c := v_1.AuxInt 9418 y := v_1.Args[0] 9419 v.reset(OpARM64SUBshiftLL) 9420 v.AuxInt = c 9421 v.AddArg(x) 9422 v.AddArg(y) 9423 return true 9424 } 9425 // match: (SUB x (SRLconst [c] y)) 9426 // cond: 9427 // result: (SUBshiftRL x y [c]) 9428 for { 9429 _ = v.Args[1] 9430 x := v.Args[0] 9431 v_1 := v.Args[1] 9432 if v_1.Op != OpARM64SRLconst { 9433 break 9434 } 9435 c := v_1.AuxInt 9436 y := v_1.Args[0] 9437 v.reset(OpARM64SUBshiftRL) 9438 v.AuxInt = c 9439 v.AddArg(x) 9440 v.AddArg(y) 9441 return true 9442 } 9443 // match: (SUB x (SRAconst [c] y)) 9444 // cond: 9445 // result: (SUBshiftRA x y [c]) 9446 for { 9447 _ = v.Args[1] 9448 x := v.Args[0] 9449 v_1 := v.Args[1] 9450 if v_1.Op != OpARM64SRAconst { 9451 break 9452 } 9453 c := v_1.AuxInt 9454 y := v_1.Args[0] 9455 v.reset(OpARM64SUBshiftRA) 9456 v.AuxInt = c 9457 v.AddArg(x) 9458 v.AddArg(y) 9459 return true 9460 } 9461 return false 9462 } 9463 func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool { 9464 // match: (SUBconst [0] x) 9465 // cond: 9466 // result: x 9467 for { 9468 if v.AuxInt != 0 { 9469 break 9470 } 9471 x := v.Args[0] 9472 v.reset(OpCopy) 9473 v.Type = x.Type 9474 v.AddArg(x) 9475 return true 9476 } 9477 // match: (SUBconst [c] (MOVDconst [d])) 9478 // cond: 9479 // result: (MOVDconst [d-c]) 9480 for { 9481 c := v.AuxInt 9482 v_0 := v.Args[0] 9483 if v_0.Op != OpARM64MOVDconst { 9484 break 9485 } 9486 d := v_0.AuxInt 9487 v.reset(OpARM64MOVDconst) 9488 v.AuxInt = d - c 9489 return true 9490 } 9491 // match: (SUBconst [c] (SUBconst [d] x)) 9492 // cond: 9493 // result: (ADDconst [-c-d] x) 9494 for { 9495 c := v.AuxInt 9496 v_0 := v.Args[0] 9497 if v_0.Op != OpARM64SUBconst { 9498 break 9499 } 9500 d := v_0.AuxInt 9501 x := v_0.Args[0] 9502 v.reset(OpARM64ADDconst) 9503 v.AuxInt = -c - d 9504 v.AddArg(x) 9505 return true 9506 } 9507 // match: (SUBconst [c] (ADDconst [d] x)) 9508 // cond: 9509 // result: (ADDconst [-c+d] x) 9510 for { 9511 c := v.AuxInt 9512 v_0 := 
v.Args[0] 9513 if v_0.Op != OpARM64ADDconst { 9514 break 9515 } 9516 d := v_0.AuxInt 9517 x := v_0.Args[0] 9518 v.reset(OpARM64ADDconst) 9519 v.AuxInt = -c + d 9520 v.AddArg(x) 9521 return true 9522 } 9523 return false 9524 } 9525 func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool { 9526 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 9527 // cond: 9528 // result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 9529 for { 9530 d := v.AuxInt 9531 _ = v.Args[1] 9532 x := v.Args[0] 9533 v_1 := v.Args[1] 9534 if v_1.Op != OpARM64MOVDconst { 9535 break 9536 } 9537 c := v_1.AuxInt 9538 v.reset(OpARM64SUBconst) 9539 v.AuxInt = int64(uint64(c) << uint64(d)) 9540 v.AddArg(x) 9541 return true 9542 } 9543 // match: (SUBshiftLL x (SLLconst x [c]) [d]) 9544 // cond: c==d 9545 // result: (MOVDconst [0]) 9546 for { 9547 d := v.AuxInt 9548 _ = v.Args[1] 9549 x := v.Args[0] 9550 v_1 := v.Args[1] 9551 if v_1.Op != OpARM64SLLconst { 9552 break 9553 } 9554 c := v_1.AuxInt 9555 if x != v_1.Args[0] { 9556 break 9557 } 9558 if !(c == d) { 9559 break 9560 } 9561 v.reset(OpARM64MOVDconst) 9562 v.AuxInt = 0 9563 return true 9564 } 9565 return false 9566 } 9567 func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool { 9568 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 9569 // cond: 9570 // result: (SUBconst x [int64(int64(c)>>uint64(d))]) 9571 for { 9572 d := v.AuxInt 9573 _ = v.Args[1] 9574 x := v.Args[0] 9575 v_1 := v.Args[1] 9576 if v_1.Op != OpARM64MOVDconst { 9577 break 9578 } 9579 c := v_1.AuxInt 9580 v.reset(OpARM64SUBconst) 9581 v.AuxInt = int64(int64(c) >> uint64(d)) 9582 v.AddArg(x) 9583 return true 9584 } 9585 // match: (SUBshiftRA x (SRAconst x [c]) [d]) 9586 // cond: c==d 9587 // result: (MOVDconst [0]) 9588 for { 9589 d := v.AuxInt 9590 _ = v.Args[1] 9591 x := v.Args[0] 9592 v_1 := v.Args[1] 9593 if v_1.Op != OpARM64SRAconst { 9594 break 9595 } 9596 c := v_1.AuxInt 9597 if x != v_1.Args[0] { 9598 break 9599 } 9600 if !(c == d) { 9601 break 9602 } 9603 v.reset(OpARM64MOVDconst) 
9604 v.AuxInt = 0 9605 return true 9606 } 9607 return false 9608 } 9609 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool { 9610 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 9611 // cond: 9612 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 9613 for { 9614 d := v.AuxInt 9615 _ = v.Args[1] 9616 x := v.Args[0] 9617 v_1 := v.Args[1] 9618 if v_1.Op != OpARM64MOVDconst { 9619 break 9620 } 9621 c := v_1.AuxInt 9622 v.reset(OpARM64SUBconst) 9623 v.AuxInt = int64(uint64(c) >> uint64(d)) 9624 v.AddArg(x) 9625 return true 9626 } 9627 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 9628 // cond: c==d 9629 // result: (MOVDconst [0]) 9630 for { 9631 d := v.AuxInt 9632 _ = v.Args[1] 9633 x := v.Args[0] 9634 v_1 := v.Args[1] 9635 if v_1.Op != OpARM64SRLconst { 9636 break 9637 } 9638 c := v_1.AuxInt 9639 if x != v_1.Args[0] { 9640 break 9641 } 9642 if !(c == d) { 9643 break 9644 } 9645 v.reset(OpARM64MOVDconst) 9646 v.AuxInt = 0 9647 return true 9648 } 9649 return false 9650 } 9651 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 9652 // match: (UDIV x (MOVDconst [1])) 9653 // cond: 9654 // result: x 9655 for { 9656 _ = v.Args[1] 9657 x := v.Args[0] 9658 v_1 := v.Args[1] 9659 if v_1.Op != OpARM64MOVDconst { 9660 break 9661 } 9662 if v_1.AuxInt != 1 { 9663 break 9664 } 9665 v.reset(OpCopy) 9666 v.Type = x.Type 9667 v.AddArg(x) 9668 return true 9669 } 9670 // match: (UDIV x (MOVDconst [c])) 9671 // cond: isPowerOfTwo(c) 9672 // result: (SRLconst [log2(c)] x) 9673 for { 9674 _ = v.Args[1] 9675 x := v.Args[0] 9676 v_1 := v.Args[1] 9677 if v_1.Op != OpARM64MOVDconst { 9678 break 9679 } 9680 c := v_1.AuxInt 9681 if !(isPowerOfTwo(c)) { 9682 break 9683 } 9684 v.reset(OpARM64SRLconst) 9685 v.AuxInt = log2(c) 9686 v.AddArg(x) 9687 return true 9688 } 9689 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 9690 // cond: 9691 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 9692 for { 9693 _ = v.Args[1] 9694 v_0 := v.Args[0] 9695 if v_0.Op != OpARM64MOVDconst { 9696 break 
9697 } 9698 c := v_0.AuxInt 9699 v_1 := v.Args[1] 9700 if v_1.Op != OpARM64MOVDconst { 9701 break 9702 } 9703 d := v_1.AuxInt 9704 v.reset(OpARM64MOVDconst) 9705 v.AuxInt = int64(uint64(c) / uint64(d)) 9706 return true 9707 } 9708 return false 9709 } 9710 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 9711 // match: (UDIVW x (MOVDconst [c])) 9712 // cond: uint32(c)==1 9713 // result: x 9714 for { 9715 _ = v.Args[1] 9716 x := v.Args[0] 9717 v_1 := v.Args[1] 9718 if v_1.Op != OpARM64MOVDconst { 9719 break 9720 } 9721 c := v_1.AuxInt 9722 if !(uint32(c) == 1) { 9723 break 9724 } 9725 v.reset(OpCopy) 9726 v.Type = x.Type 9727 v.AddArg(x) 9728 return true 9729 } 9730 // match: (UDIVW x (MOVDconst [c])) 9731 // cond: isPowerOfTwo(c) && is32Bit(c) 9732 // result: (SRLconst [log2(c)] x) 9733 for { 9734 _ = v.Args[1] 9735 x := v.Args[0] 9736 v_1 := v.Args[1] 9737 if v_1.Op != OpARM64MOVDconst { 9738 break 9739 } 9740 c := v_1.AuxInt 9741 if !(isPowerOfTwo(c) && is32Bit(c)) { 9742 break 9743 } 9744 v.reset(OpARM64SRLconst) 9745 v.AuxInt = log2(c) 9746 v.AddArg(x) 9747 return true 9748 } 9749 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 9750 // cond: 9751 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 9752 for { 9753 _ = v.Args[1] 9754 v_0 := v.Args[0] 9755 if v_0.Op != OpARM64MOVDconst { 9756 break 9757 } 9758 c := v_0.AuxInt 9759 v_1 := v.Args[1] 9760 if v_1.Op != OpARM64MOVDconst { 9761 break 9762 } 9763 d := v_1.AuxInt 9764 v.reset(OpARM64MOVDconst) 9765 v.AuxInt = int64(uint32(c) / uint32(d)) 9766 return true 9767 } 9768 return false 9769 } 9770 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 9771 // match: (UMOD _ (MOVDconst [1])) 9772 // cond: 9773 // result: (MOVDconst [0]) 9774 for { 9775 _ = v.Args[1] 9776 v_1 := v.Args[1] 9777 if v_1.Op != OpARM64MOVDconst { 9778 break 9779 } 9780 if v_1.AuxInt != 1 { 9781 break 9782 } 9783 v.reset(OpARM64MOVDconst) 9784 v.AuxInt = 0 9785 return true 9786 } 9787 // match: (UMOD x (MOVDconst [c])) 9788 // 
cond: isPowerOfTwo(c) 9789 // result: (ANDconst [c-1] x) 9790 for { 9791 _ = v.Args[1] 9792 x := v.Args[0] 9793 v_1 := v.Args[1] 9794 if v_1.Op != OpARM64MOVDconst { 9795 break 9796 } 9797 c := v_1.AuxInt 9798 if !(isPowerOfTwo(c)) { 9799 break 9800 } 9801 v.reset(OpARM64ANDconst) 9802 v.AuxInt = c - 1 9803 v.AddArg(x) 9804 return true 9805 } 9806 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 9807 // cond: 9808 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 9809 for { 9810 _ = v.Args[1] 9811 v_0 := v.Args[0] 9812 if v_0.Op != OpARM64MOVDconst { 9813 break 9814 } 9815 c := v_0.AuxInt 9816 v_1 := v.Args[1] 9817 if v_1.Op != OpARM64MOVDconst { 9818 break 9819 } 9820 d := v_1.AuxInt 9821 v.reset(OpARM64MOVDconst) 9822 v.AuxInt = int64(uint64(c) % uint64(d)) 9823 return true 9824 } 9825 return false 9826 } 9827 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 9828 // match: (UMODW _ (MOVDconst [c])) 9829 // cond: uint32(c)==1 9830 // result: (MOVDconst [0]) 9831 for { 9832 _ = v.Args[1] 9833 v_1 := v.Args[1] 9834 if v_1.Op != OpARM64MOVDconst { 9835 break 9836 } 9837 c := v_1.AuxInt 9838 if !(uint32(c) == 1) { 9839 break 9840 } 9841 v.reset(OpARM64MOVDconst) 9842 v.AuxInt = 0 9843 return true 9844 } 9845 // match: (UMODW x (MOVDconst [c])) 9846 // cond: isPowerOfTwo(c) && is32Bit(c) 9847 // result: (ANDconst [c-1] x) 9848 for { 9849 _ = v.Args[1] 9850 x := v.Args[0] 9851 v_1 := v.Args[1] 9852 if v_1.Op != OpARM64MOVDconst { 9853 break 9854 } 9855 c := v_1.AuxInt 9856 if !(isPowerOfTwo(c) && is32Bit(c)) { 9857 break 9858 } 9859 v.reset(OpARM64ANDconst) 9860 v.AuxInt = c - 1 9861 v.AddArg(x) 9862 return true 9863 } 9864 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 9865 // cond: 9866 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 9867 for { 9868 _ = v.Args[1] 9869 v_0 := v.Args[0] 9870 if v_0.Op != OpARM64MOVDconst { 9871 break 9872 } 9873 c := v_0.AuxInt 9874 v_1 := v.Args[1] 9875 if v_1.Op != OpARM64MOVDconst { 9876 break 9877 } 9878 d := 
v_1.AuxInt 9879 v.reset(OpARM64MOVDconst) 9880 v.AuxInt = int64(uint32(c) % uint32(d)) 9881 return true 9882 } 9883 return false 9884 } 9885 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 9886 // match: (XOR x (MOVDconst [c])) 9887 // cond: 9888 // result: (XORconst [c] x) 9889 for { 9890 _ = v.Args[1] 9891 x := v.Args[0] 9892 v_1 := v.Args[1] 9893 if v_1.Op != OpARM64MOVDconst { 9894 break 9895 } 9896 c := v_1.AuxInt 9897 v.reset(OpARM64XORconst) 9898 v.AuxInt = c 9899 v.AddArg(x) 9900 return true 9901 } 9902 // match: (XOR (MOVDconst [c]) x) 9903 // cond: 9904 // result: (XORconst [c] x) 9905 for { 9906 _ = v.Args[1] 9907 v_0 := v.Args[0] 9908 if v_0.Op != OpARM64MOVDconst { 9909 break 9910 } 9911 c := v_0.AuxInt 9912 x := v.Args[1] 9913 v.reset(OpARM64XORconst) 9914 v.AuxInt = c 9915 v.AddArg(x) 9916 return true 9917 } 9918 // match: (XOR x x) 9919 // cond: 9920 // result: (MOVDconst [0]) 9921 for { 9922 _ = v.Args[1] 9923 x := v.Args[0] 9924 if x != v.Args[1] { 9925 break 9926 } 9927 v.reset(OpARM64MOVDconst) 9928 v.AuxInt = 0 9929 return true 9930 } 9931 // match: (XOR x (SLLconst [c] y)) 9932 // cond: 9933 // result: (XORshiftLL x y [c]) 9934 for { 9935 _ = v.Args[1] 9936 x := v.Args[0] 9937 v_1 := v.Args[1] 9938 if v_1.Op != OpARM64SLLconst { 9939 break 9940 } 9941 c := v_1.AuxInt 9942 y := v_1.Args[0] 9943 v.reset(OpARM64XORshiftLL) 9944 v.AuxInt = c 9945 v.AddArg(x) 9946 v.AddArg(y) 9947 return true 9948 } 9949 // match: (XOR (SLLconst [c] y) x) 9950 // cond: 9951 // result: (XORshiftLL x y [c]) 9952 for { 9953 _ = v.Args[1] 9954 v_0 := v.Args[0] 9955 if v_0.Op != OpARM64SLLconst { 9956 break 9957 } 9958 c := v_0.AuxInt 9959 y := v_0.Args[0] 9960 x := v.Args[1] 9961 v.reset(OpARM64XORshiftLL) 9962 v.AuxInt = c 9963 v.AddArg(x) 9964 v.AddArg(y) 9965 return true 9966 } 9967 // match: (XOR x (SRLconst [c] y)) 9968 // cond: 9969 // result: (XORshiftRL x y [c]) 9970 for { 9971 _ = v.Args[1] 9972 x := v.Args[0] 9973 v_1 := v.Args[1] 9974 if v_1.Op != 
OpARM64SRLconst { 9975 break 9976 } 9977 c := v_1.AuxInt 9978 y := v_1.Args[0] 9979 v.reset(OpARM64XORshiftRL) 9980 v.AuxInt = c 9981 v.AddArg(x) 9982 v.AddArg(y) 9983 return true 9984 } 9985 // match: (XOR (SRLconst [c] y) x) 9986 // cond: 9987 // result: (XORshiftRL x y [c]) 9988 for { 9989 _ = v.Args[1] 9990 v_0 := v.Args[0] 9991 if v_0.Op != OpARM64SRLconst { 9992 break 9993 } 9994 c := v_0.AuxInt 9995 y := v_0.Args[0] 9996 x := v.Args[1] 9997 v.reset(OpARM64XORshiftRL) 9998 v.AuxInt = c 9999 v.AddArg(x) 10000 v.AddArg(y) 10001 return true 10002 } 10003 // match: (XOR x (SRAconst [c] y)) 10004 // cond: 10005 // result: (XORshiftRA x y [c]) 10006 for { 10007 _ = v.Args[1] 10008 x := v.Args[0] 10009 v_1 := v.Args[1] 10010 if v_1.Op != OpARM64SRAconst { 10011 break 10012 } 10013 c := v_1.AuxInt 10014 y := v_1.Args[0] 10015 v.reset(OpARM64XORshiftRA) 10016 v.AuxInt = c 10017 v.AddArg(x) 10018 v.AddArg(y) 10019 return true 10020 } 10021 // match: (XOR (SRAconst [c] y) x) 10022 // cond: 10023 // result: (XORshiftRA x y [c]) 10024 for { 10025 _ = v.Args[1] 10026 v_0 := v.Args[0] 10027 if v_0.Op != OpARM64SRAconst { 10028 break 10029 } 10030 c := v_0.AuxInt 10031 y := v_0.Args[0] 10032 x := v.Args[1] 10033 v.reset(OpARM64XORshiftRA) 10034 v.AuxInt = c 10035 v.AddArg(x) 10036 v.AddArg(y) 10037 return true 10038 } 10039 return false 10040 } 10041 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 10042 // match: (XORconst [0] x) 10043 // cond: 10044 // result: x 10045 for { 10046 if v.AuxInt != 0 { 10047 break 10048 } 10049 x := v.Args[0] 10050 v.reset(OpCopy) 10051 v.Type = x.Type 10052 v.AddArg(x) 10053 return true 10054 } 10055 // match: (XORconst [-1] x) 10056 // cond: 10057 // result: (MVN x) 10058 for { 10059 if v.AuxInt != -1 { 10060 break 10061 } 10062 x := v.Args[0] 10063 v.reset(OpARM64MVN) 10064 v.AddArg(x) 10065 return true 10066 } 10067 // match: (XORconst [c] (MOVDconst [d])) 10068 // cond: 10069 // result: (MOVDconst [c^d]) 10070 for { 10071 c := 
v.AuxInt 10072 v_0 := v.Args[0] 10073 if v_0.Op != OpARM64MOVDconst { 10074 break 10075 } 10076 d := v_0.AuxInt 10077 v.reset(OpARM64MOVDconst) 10078 v.AuxInt = c ^ d 10079 return true 10080 } 10081 // match: (XORconst [c] (XORconst [d] x)) 10082 // cond: 10083 // result: (XORconst [c^d] x) 10084 for { 10085 c := v.AuxInt 10086 v_0 := v.Args[0] 10087 if v_0.Op != OpARM64XORconst { 10088 break 10089 } 10090 d := v_0.AuxInt 10091 x := v_0.Args[0] 10092 v.reset(OpARM64XORconst) 10093 v.AuxInt = c ^ d 10094 v.AddArg(x) 10095 return true 10096 } 10097 return false 10098 } 10099 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool { 10100 b := v.Block 10101 _ = b 10102 // match: (XORshiftLL (MOVDconst [c]) x [d]) 10103 // cond: 10104 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 10105 for { 10106 d := v.AuxInt 10107 _ = v.Args[1] 10108 v_0 := v.Args[0] 10109 if v_0.Op != OpARM64MOVDconst { 10110 break 10111 } 10112 c := v_0.AuxInt 10113 x := v.Args[1] 10114 v.reset(OpARM64XORconst) 10115 v.AuxInt = c 10116 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 10117 v0.AuxInt = d 10118 v0.AddArg(x) 10119 v.AddArg(v0) 10120 return true 10121 } 10122 // match: (XORshiftLL x (MOVDconst [c]) [d]) 10123 // cond: 10124 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 10125 for { 10126 d := v.AuxInt 10127 _ = v.Args[1] 10128 x := v.Args[0] 10129 v_1 := v.Args[1] 10130 if v_1.Op != OpARM64MOVDconst { 10131 break 10132 } 10133 c := v_1.AuxInt 10134 v.reset(OpARM64XORconst) 10135 v.AuxInt = int64(uint64(c) << uint64(d)) 10136 v.AddArg(x) 10137 return true 10138 } 10139 // match: (XORshiftLL x (SLLconst x [c]) [d]) 10140 // cond: c==d 10141 // result: (MOVDconst [0]) 10142 for { 10143 d := v.AuxInt 10144 _ = v.Args[1] 10145 x := v.Args[0] 10146 v_1 := v.Args[1] 10147 if v_1.Op != OpARM64SLLconst { 10148 break 10149 } 10150 c := v_1.AuxInt 10151 if x != v_1.Args[0] { 10152 break 10153 } 10154 if !(c == d) { 10155 break 10156 } 10157 v.reset(OpARM64MOVDconst) 10158 
v.AuxInt = 0 10159 return true 10160 } 10161 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 10162 // cond: 10163 // result: (RORconst [64-c] x) 10164 for { 10165 c := v.AuxInt 10166 _ = v.Args[1] 10167 v_0 := v.Args[0] 10168 if v_0.Op != OpARM64SRLconst { 10169 break 10170 } 10171 if v_0.AuxInt != 64-c { 10172 break 10173 } 10174 x := v_0.Args[0] 10175 if x != v.Args[1] { 10176 break 10177 } 10178 v.reset(OpARM64RORconst) 10179 v.AuxInt = 64 - c 10180 v.AddArg(x) 10181 return true 10182 } 10183 // match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 10184 // cond: c < 32 && t.Size() == 4 10185 // result: (RORWconst [32-c] x) 10186 for { 10187 t := v.Type 10188 c := v.AuxInt 10189 _ = v.Args[1] 10190 v_0 := v.Args[0] 10191 if v_0.Op != OpARM64SRLconst { 10192 break 10193 } 10194 if v_0.AuxInt != 32-c { 10195 break 10196 } 10197 v_0_0 := v_0.Args[0] 10198 if v_0_0.Op != OpARM64MOVWUreg { 10199 break 10200 } 10201 x := v_0_0.Args[0] 10202 if x != v.Args[1] { 10203 break 10204 } 10205 if !(c < 32 && t.Size() == 4) { 10206 break 10207 } 10208 v.reset(OpARM64RORWconst) 10209 v.AuxInt = 32 - c 10210 v.AddArg(x) 10211 return true 10212 } 10213 return false 10214 } 10215 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 10216 b := v.Block 10217 _ = b 10218 // match: (XORshiftRA (MOVDconst [c]) x [d]) 10219 // cond: 10220 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 10221 for { 10222 d := v.AuxInt 10223 _ = v.Args[1] 10224 v_0 := v.Args[0] 10225 if v_0.Op != OpARM64MOVDconst { 10226 break 10227 } 10228 c := v_0.AuxInt 10229 x := v.Args[1] 10230 v.reset(OpARM64XORconst) 10231 v.AuxInt = c 10232 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 10233 v0.AuxInt = d 10234 v0.AddArg(x) 10235 v.AddArg(v0) 10236 return true 10237 } 10238 // match: (XORshiftRA x (MOVDconst [c]) [d]) 10239 // cond: 10240 // result: (XORconst x [int64(int64(c)>>uint64(d))]) 10241 for { 10242 d := v.AuxInt 10243 _ = v.Args[1] 10244 x := v.Args[0] 10245 v_1 := v.Args[1] 10246 
if v_1.Op != OpARM64MOVDconst { 10247 break 10248 } 10249 c := v_1.AuxInt 10250 v.reset(OpARM64XORconst) 10251 v.AuxInt = int64(int64(c) >> uint64(d)) 10252 v.AddArg(x) 10253 return true 10254 } 10255 // match: (XORshiftRA x (SRAconst x [c]) [d]) 10256 // cond: c==d 10257 // result: (MOVDconst [0]) 10258 for { 10259 d := v.AuxInt 10260 _ = v.Args[1] 10261 x := v.Args[0] 10262 v_1 := v.Args[1] 10263 if v_1.Op != OpARM64SRAconst { 10264 break 10265 } 10266 c := v_1.AuxInt 10267 if x != v_1.Args[0] { 10268 break 10269 } 10270 if !(c == d) { 10271 break 10272 } 10273 v.reset(OpARM64MOVDconst) 10274 v.AuxInt = 0 10275 return true 10276 } 10277 return false 10278 } 10279 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 10280 b := v.Block 10281 _ = b 10282 // match: (XORshiftRL (MOVDconst [c]) x [d]) 10283 // cond: 10284 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 10285 for { 10286 d := v.AuxInt 10287 _ = v.Args[1] 10288 v_0 := v.Args[0] 10289 if v_0.Op != OpARM64MOVDconst { 10290 break 10291 } 10292 c := v_0.AuxInt 10293 x := v.Args[1] 10294 v.reset(OpARM64XORconst) 10295 v.AuxInt = c 10296 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 10297 v0.AuxInt = d 10298 v0.AddArg(x) 10299 v.AddArg(v0) 10300 return true 10301 } 10302 // match: (XORshiftRL x (MOVDconst [c]) [d]) 10303 // cond: 10304 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 10305 for { 10306 d := v.AuxInt 10307 _ = v.Args[1] 10308 x := v.Args[0] 10309 v_1 := v.Args[1] 10310 if v_1.Op != OpARM64MOVDconst { 10311 break 10312 } 10313 c := v_1.AuxInt 10314 v.reset(OpARM64XORconst) 10315 v.AuxInt = int64(uint64(c) >> uint64(d)) 10316 v.AddArg(x) 10317 return true 10318 } 10319 // match: (XORshiftRL x (SRLconst x [c]) [d]) 10320 // cond: c==d 10321 // result: (MOVDconst [0]) 10322 for { 10323 d := v.AuxInt 10324 _ = v.Args[1] 10325 x := v.Args[0] 10326 v_1 := v.Args[1] 10327 if v_1.Op != OpARM64SRLconst { 10328 break 10329 } 10330 c := v_1.AuxInt 10331 if x != v_1.Args[0] { 10332 break 
10333 } 10334 if !(c == d) { 10335 break 10336 } 10337 v.reset(OpARM64MOVDconst) 10338 v.AuxInt = 0 10339 return true 10340 } 10341 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 10342 // cond: 10343 // result: (RORconst [ c] x) 10344 for { 10345 c := v.AuxInt 10346 _ = v.Args[1] 10347 v_0 := v.Args[0] 10348 if v_0.Op != OpARM64SLLconst { 10349 break 10350 } 10351 if v_0.AuxInt != 64-c { 10352 break 10353 } 10354 x := v_0.Args[0] 10355 if x != v.Args[1] { 10356 break 10357 } 10358 v.reset(OpARM64RORconst) 10359 v.AuxInt = c 10360 v.AddArg(x) 10361 return true 10362 } 10363 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 10364 // cond: c < 32 && t.Size() == 4 10365 // result: (RORWconst [ c] x) 10366 for { 10367 t := v.Type 10368 c := v.AuxInt 10369 _ = v.Args[1] 10370 v_0 := v.Args[0] 10371 if v_0.Op != OpARM64SLLconst { 10372 break 10373 } 10374 if v_0.AuxInt != 32-c { 10375 break 10376 } 10377 x := v_0.Args[0] 10378 v_1 := v.Args[1] 10379 if v_1.Op != OpARM64MOVWUreg { 10380 break 10381 } 10382 if x != v_1.Args[0] { 10383 break 10384 } 10385 if !(c < 32 && t.Size() == 4) { 10386 break 10387 } 10388 v.reset(OpARM64RORWconst) 10389 v.AuxInt = c 10390 v.AddArg(x) 10391 return true 10392 } 10393 return false 10394 } 10395 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 10396 // match: (Add16 x y) 10397 // cond: 10398 // result: (ADD x y) 10399 for { 10400 _ = v.Args[1] 10401 x := v.Args[0] 10402 y := v.Args[1] 10403 v.reset(OpARM64ADD) 10404 v.AddArg(x) 10405 v.AddArg(y) 10406 return true 10407 } 10408 } 10409 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 10410 // match: (Add32 x y) 10411 // cond: 10412 // result: (ADD x y) 10413 for { 10414 _ = v.Args[1] 10415 x := v.Args[0] 10416 y := v.Args[1] 10417 v.reset(OpARM64ADD) 10418 v.AddArg(x) 10419 v.AddArg(y) 10420 return true 10421 } 10422 } 10423 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 10424 // match: (Add32F x y) 10425 // cond: 10426 // result: (FADDS x y) 10427 for { 10428 _ = 
v.Args[1] 10429 x := v.Args[0] 10430 y := v.Args[1] 10431 v.reset(OpARM64FADDS) 10432 v.AddArg(x) 10433 v.AddArg(y) 10434 return true 10435 } 10436 } 10437 func rewriteValueARM64_OpAdd64_0(v *Value) bool { 10438 // match: (Add64 x y) 10439 // cond: 10440 // result: (ADD x y) 10441 for { 10442 _ = v.Args[1] 10443 x := v.Args[0] 10444 y := v.Args[1] 10445 v.reset(OpARM64ADD) 10446 v.AddArg(x) 10447 v.AddArg(y) 10448 return true 10449 } 10450 } 10451 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 10452 // match: (Add64F x y) 10453 // cond: 10454 // result: (FADDD x y) 10455 for { 10456 _ = v.Args[1] 10457 x := v.Args[0] 10458 y := v.Args[1] 10459 v.reset(OpARM64FADDD) 10460 v.AddArg(x) 10461 v.AddArg(y) 10462 return true 10463 } 10464 } 10465 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 10466 // match: (Add8 x y) 10467 // cond: 10468 // result: (ADD x y) 10469 for { 10470 _ = v.Args[1] 10471 x := v.Args[0] 10472 y := v.Args[1] 10473 v.reset(OpARM64ADD) 10474 v.AddArg(x) 10475 v.AddArg(y) 10476 return true 10477 } 10478 } 10479 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 10480 // match: (AddPtr x y) 10481 // cond: 10482 // result: (ADD x y) 10483 for { 10484 _ = v.Args[1] 10485 x := v.Args[0] 10486 y := v.Args[1] 10487 v.reset(OpARM64ADD) 10488 v.AddArg(x) 10489 v.AddArg(y) 10490 return true 10491 } 10492 } 10493 func rewriteValueARM64_OpAddr_0(v *Value) bool { 10494 // match: (Addr {sym} base) 10495 // cond: 10496 // result: (MOVDaddr {sym} base) 10497 for { 10498 sym := v.Aux 10499 base := v.Args[0] 10500 v.reset(OpARM64MOVDaddr) 10501 v.Aux = sym 10502 v.AddArg(base) 10503 return true 10504 } 10505 } 10506 func rewriteValueARM64_OpAnd16_0(v *Value) bool { 10507 // match: (And16 x y) 10508 // cond: 10509 // result: (AND x y) 10510 for { 10511 _ = v.Args[1] 10512 x := v.Args[0] 10513 y := v.Args[1] 10514 v.reset(OpARM64AND) 10515 v.AddArg(x) 10516 v.AddArg(y) 10517 return true 10518 } 10519 } 10520 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 10521 
// match: (And32 x y) 10522 // cond: 10523 // result: (AND x y) 10524 for { 10525 _ = v.Args[1] 10526 x := v.Args[0] 10527 y := v.Args[1] 10528 v.reset(OpARM64AND) 10529 v.AddArg(x) 10530 v.AddArg(y) 10531 return true 10532 } 10533 } 10534 func rewriteValueARM64_OpAnd64_0(v *Value) bool { 10535 // match: (And64 x y) 10536 // cond: 10537 // result: (AND x y) 10538 for { 10539 _ = v.Args[1] 10540 x := v.Args[0] 10541 y := v.Args[1] 10542 v.reset(OpARM64AND) 10543 v.AddArg(x) 10544 v.AddArg(y) 10545 return true 10546 } 10547 } 10548 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 10549 // match: (And8 x y) 10550 // cond: 10551 // result: (AND x y) 10552 for { 10553 _ = v.Args[1] 10554 x := v.Args[0] 10555 y := v.Args[1] 10556 v.reset(OpARM64AND) 10557 v.AddArg(x) 10558 v.AddArg(y) 10559 return true 10560 } 10561 } 10562 func rewriteValueARM64_OpAndB_0(v *Value) bool { 10563 // match: (AndB x y) 10564 // cond: 10565 // result: (AND x y) 10566 for { 10567 _ = v.Args[1] 10568 x := v.Args[0] 10569 y := v.Args[1] 10570 v.reset(OpARM64AND) 10571 v.AddArg(x) 10572 v.AddArg(y) 10573 return true 10574 } 10575 } 10576 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 10577 // match: (AtomicAdd32 ptr val mem) 10578 // cond: 10579 // result: (LoweredAtomicAdd32 ptr val mem) 10580 for { 10581 _ = v.Args[2] 10582 ptr := v.Args[0] 10583 val := v.Args[1] 10584 mem := v.Args[2] 10585 v.reset(OpARM64LoweredAtomicAdd32) 10586 v.AddArg(ptr) 10587 v.AddArg(val) 10588 v.AddArg(mem) 10589 return true 10590 } 10591 } 10592 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 10593 // match: (AtomicAdd64 ptr val mem) 10594 // cond: 10595 // result: (LoweredAtomicAdd64 ptr val mem) 10596 for { 10597 _ = v.Args[2] 10598 ptr := v.Args[0] 10599 val := v.Args[1] 10600 mem := v.Args[2] 10601 v.reset(OpARM64LoweredAtomicAdd64) 10602 v.AddArg(ptr) 10603 v.AddArg(val) 10604 v.AddArg(mem) 10605 return true 10606 } 10607 } 10608 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 10609 // 
match: (AtomicAnd8 ptr val mem) 10610 // cond: 10611 // result: (LoweredAtomicAnd8 ptr val mem) 10612 for { 10613 _ = v.Args[2] 10614 ptr := v.Args[0] 10615 val := v.Args[1] 10616 mem := v.Args[2] 10617 v.reset(OpARM64LoweredAtomicAnd8) 10618 v.AddArg(ptr) 10619 v.AddArg(val) 10620 v.AddArg(mem) 10621 return true 10622 } 10623 } 10624 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 10625 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 10626 // cond: 10627 // result: (LoweredAtomicCas32 ptr old new_ mem) 10628 for { 10629 _ = v.Args[3] 10630 ptr := v.Args[0] 10631 old := v.Args[1] 10632 new_ := v.Args[2] 10633 mem := v.Args[3] 10634 v.reset(OpARM64LoweredAtomicCas32) 10635 v.AddArg(ptr) 10636 v.AddArg(old) 10637 v.AddArg(new_) 10638 v.AddArg(mem) 10639 return true 10640 } 10641 } 10642 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 10643 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 10644 // cond: 10645 // result: (LoweredAtomicCas64 ptr old new_ mem) 10646 for { 10647 _ = v.Args[3] 10648 ptr := v.Args[0] 10649 old := v.Args[1] 10650 new_ := v.Args[2] 10651 mem := v.Args[3] 10652 v.reset(OpARM64LoweredAtomicCas64) 10653 v.AddArg(ptr) 10654 v.AddArg(old) 10655 v.AddArg(new_) 10656 v.AddArg(mem) 10657 return true 10658 } 10659 } 10660 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool { 10661 // match: (AtomicExchange32 ptr val mem) 10662 // cond: 10663 // result: (LoweredAtomicExchange32 ptr val mem) 10664 for { 10665 _ = v.Args[2] 10666 ptr := v.Args[0] 10667 val := v.Args[1] 10668 mem := v.Args[2] 10669 v.reset(OpARM64LoweredAtomicExchange32) 10670 v.AddArg(ptr) 10671 v.AddArg(val) 10672 v.AddArg(mem) 10673 return true 10674 } 10675 } 10676 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool { 10677 // match: (AtomicExchange64 ptr val mem) 10678 // cond: 10679 // result: (LoweredAtomicExchange64 ptr val mem) 10680 for { 10681 _ = v.Args[2] 10682 ptr := v.Args[0] 10683 val := v.Args[1] 10684 mem := 
v.Args[2] 10685 v.reset(OpARM64LoweredAtomicExchange64) 10686 v.AddArg(ptr) 10687 v.AddArg(val) 10688 v.AddArg(mem) 10689 return true 10690 } 10691 } 10692 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool { 10693 // match: (AtomicLoad32 ptr mem) 10694 // cond: 10695 // result: (LDARW ptr mem) 10696 for { 10697 _ = v.Args[1] 10698 ptr := v.Args[0] 10699 mem := v.Args[1] 10700 v.reset(OpARM64LDARW) 10701 v.AddArg(ptr) 10702 v.AddArg(mem) 10703 return true 10704 } 10705 } 10706 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool { 10707 // match: (AtomicLoad64 ptr mem) 10708 // cond: 10709 // result: (LDAR ptr mem) 10710 for { 10711 _ = v.Args[1] 10712 ptr := v.Args[0] 10713 mem := v.Args[1] 10714 v.reset(OpARM64LDAR) 10715 v.AddArg(ptr) 10716 v.AddArg(mem) 10717 return true 10718 } 10719 } 10720 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool { 10721 // match: (AtomicLoadPtr ptr mem) 10722 // cond: 10723 // result: (LDAR ptr mem) 10724 for { 10725 _ = v.Args[1] 10726 ptr := v.Args[0] 10727 mem := v.Args[1] 10728 v.reset(OpARM64LDAR) 10729 v.AddArg(ptr) 10730 v.AddArg(mem) 10731 return true 10732 } 10733 } 10734 func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool { 10735 // match: (AtomicOr8 ptr val mem) 10736 // cond: 10737 // result: (LoweredAtomicOr8 ptr val mem) 10738 for { 10739 _ = v.Args[2] 10740 ptr := v.Args[0] 10741 val := v.Args[1] 10742 mem := v.Args[2] 10743 v.reset(OpARM64LoweredAtomicOr8) 10744 v.AddArg(ptr) 10745 v.AddArg(val) 10746 v.AddArg(mem) 10747 return true 10748 } 10749 } 10750 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool { 10751 // match: (AtomicStore32 ptr val mem) 10752 // cond: 10753 // result: (STLRW ptr val mem) 10754 for { 10755 _ = v.Args[2] 10756 ptr := v.Args[0] 10757 val := v.Args[1] 10758 mem := v.Args[2] 10759 v.reset(OpARM64STLRW) 10760 v.AddArg(ptr) 10761 v.AddArg(val) 10762 v.AddArg(mem) 10763 return true 10764 } 10765 } 10766 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool { 10767 // match: 
(AtomicStore64 ptr val mem) 10768 // cond: 10769 // result: (STLR ptr val mem) 10770 for { 10771 _ = v.Args[2] 10772 ptr := v.Args[0] 10773 val := v.Args[1] 10774 mem := v.Args[2] 10775 v.reset(OpARM64STLR) 10776 v.AddArg(ptr) 10777 v.AddArg(val) 10778 v.AddArg(mem) 10779 return true 10780 } 10781 } 10782 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool { 10783 // match: (AtomicStorePtrNoWB ptr val mem) 10784 // cond: 10785 // result: (STLR ptr val mem) 10786 for { 10787 _ = v.Args[2] 10788 ptr := v.Args[0] 10789 val := v.Args[1] 10790 mem := v.Args[2] 10791 v.reset(OpARM64STLR) 10792 v.AddArg(ptr) 10793 v.AddArg(val) 10794 v.AddArg(mem) 10795 return true 10796 } 10797 } 10798 func rewriteValueARM64_OpAvg64u_0(v *Value) bool { 10799 b := v.Block 10800 _ = b 10801 // match: (Avg64u <t> x y) 10802 // cond: 10803 // result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y) 10804 for { 10805 t := v.Type 10806 _ = v.Args[1] 10807 x := v.Args[0] 10808 y := v.Args[1] 10809 v.reset(OpARM64ADD) 10810 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 10811 v0.AuxInt = 1 10812 v1 := b.NewValue0(v.Pos, OpARM64SUB, t) 10813 v1.AddArg(x) 10814 v1.AddArg(y) 10815 v0.AddArg(v1) 10816 v.AddArg(v0) 10817 v.AddArg(y) 10818 return true 10819 } 10820 } 10821 func rewriteValueARM64_OpBitLen64_0(v *Value) bool { 10822 b := v.Block 10823 _ = b 10824 typ := &b.Func.Config.Types 10825 _ = typ 10826 // match: (BitLen64 x) 10827 // cond: 10828 // result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x)) 10829 for { 10830 x := v.Args[0] 10831 v.reset(OpARM64SUB) 10832 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 10833 v0.AuxInt = 64 10834 v.AddArg(v0) 10835 v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int) 10836 v1.AddArg(x) 10837 v.AddArg(v1) 10838 return true 10839 } 10840 } 10841 func rewriteValueARM64_OpBitRev16_0(v *Value) bool { 10842 b := v.Block 10843 _ = b 10844 typ := &b.Func.Config.Types 10845 _ = typ 10846 // match: (BitRev16 x) 10847 // cond: 10848 // result: (SRLconst [48] (RBIT 
<typ.UInt64> x)) 10849 for { 10850 x := v.Args[0] 10851 v.reset(OpARM64SRLconst) 10852 v.AuxInt = 48 10853 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 10854 v0.AddArg(x) 10855 v.AddArg(v0) 10856 return true 10857 } 10858 } 10859 func rewriteValueARM64_OpBitRev32_0(v *Value) bool { 10860 // match: (BitRev32 x) 10861 // cond: 10862 // result: (RBITW x) 10863 for { 10864 x := v.Args[0] 10865 v.reset(OpARM64RBITW) 10866 v.AddArg(x) 10867 return true 10868 } 10869 } 10870 func rewriteValueARM64_OpBitRev64_0(v *Value) bool { 10871 // match: (BitRev64 x) 10872 // cond: 10873 // result: (RBIT x) 10874 for { 10875 x := v.Args[0] 10876 v.reset(OpARM64RBIT) 10877 v.AddArg(x) 10878 return true 10879 } 10880 } 10881 func rewriteValueARM64_OpBitRev8_0(v *Value) bool { 10882 b := v.Block 10883 _ = b 10884 typ := &b.Func.Config.Types 10885 _ = typ 10886 // match: (BitRev8 x) 10887 // cond: 10888 // result: (SRLconst [56] (RBIT <typ.UInt64> x)) 10889 for { 10890 x := v.Args[0] 10891 v.reset(OpARM64SRLconst) 10892 v.AuxInt = 56 10893 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 10894 v0.AddArg(x) 10895 v.AddArg(v0) 10896 return true 10897 } 10898 } 10899 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 10900 // match: (Bswap32 x) 10901 // cond: 10902 // result: (REVW x) 10903 for { 10904 x := v.Args[0] 10905 v.reset(OpARM64REVW) 10906 v.AddArg(x) 10907 return true 10908 } 10909 } 10910 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 10911 // match: (Bswap64 x) 10912 // cond: 10913 // result: (REV x) 10914 for { 10915 x := v.Args[0] 10916 v.reset(OpARM64REV) 10917 v.AddArg(x) 10918 return true 10919 } 10920 } 10921 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 10922 // match: (ClosureCall [argwid] entry closure mem) 10923 // cond: 10924 // result: (CALLclosure [argwid] entry closure mem) 10925 for { 10926 argwid := v.AuxInt 10927 _ = v.Args[2] 10928 entry := v.Args[0] 10929 closure := v.Args[1] 10930 mem := v.Args[2] 10931 v.reset(OpARM64CALLclosure) 10932 
v.AuxInt = argwid 10933 v.AddArg(entry) 10934 v.AddArg(closure) 10935 v.AddArg(mem) 10936 return true 10937 } 10938 } 10939 func rewriteValueARM64_OpCom16_0(v *Value) bool { 10940 // match: (Com16 x) 10941 // cond: 10942 // result: (MVN x) 10943 for { 10944 x := v.Args[0] 10945 v.reset(OpARM64MVN) 10946 v.AddArg(x) 10947 return true 10948 } 10949 } 10950 func rewriteValueARM64_OpCom32_0(v *Value) bool { 10951 // match: (Com32 x) 10952 // cond: 10953 // result: (MVN x) 10954 for { 10955 x := v.Args[0] 10956 v.reset(OpARM64MVN) 10957 v.AddArg(x) 10958 return true 10959 } 10960 } 10961 func rewriteValueARM64_OpCom64_0(v *Value) bool { 10962 // match: (Com64 x) 10963 // cond: 10964 // result: (MVN x) 10965 for { 10966 x := v.Args[0] 10967 v.reset(OpARM64MVN) 10968 v.AddArg(x) 10969 return true 10970 } 10971 } 10972 func rewriteValueARM64_OpCom8_0(v *Value) bool { 10973 // match: (Com8 x) 10974 // cond: 10975 // result: (MVN x) 10976 for { 10977 x := v.Args[0] 10978 v.reset(OpARM64MVN) 10979 v.AddArg(x) 10980 return true 10981 } 10982 } 10983 func rewriteValueARM64_OpConst16_0(v *Value) bool { 10984 // match: (Const16 [val]) 10985 // cond: 10986 // result: (MOVDconst [val]) 10987 for { 10988 val := v.AuxInt 10989 v.reset(OpARM64MOVDconst) 10990 v.AuxInt = val 10991 return true 10992 } 10993 } 10994 func rewriteValueARM64_OpConst32_0(v *Value) bool { 10995 // match: (Const32 [val]) 10996 // cond: 10997 // result: (MOVDconst [val]) 10998 for { 10999 val := v.AuxInt 11000 v.reset(OpARM64MOVDconst) 11001 v.AuxInt = val 11002 return true 11003 } 11004 } 11005 func rewriteValueARM64_OpConst32F_0(v *Value) bool { 11006 // match: (Const32F [val]) 11007 // cond: 11008 // result: (FMOVSconst [val]) 11009 for { 11010 val := v.AuxInt 11011 v.reset(OpARM64FMOVSconst) 11012 v.AuxInt = val 11013 return true 11014 } 11015 } 11016 func rewriteValueARM64_OpConst64_0(v *Value) bool { 11017 // match: (Const64 [val]) 11018 // cond: 11019 // result: (MOVDconst [val]) 11020 for { 11021 val := 
v.AuxInt 11022 v.reset(OpARM64MOVDconst) 11023 v.AuxInt = val 11024 return true 11025 } 11026 } 11027 func rewriteValueARM64_OpConst64F_0(v *Value) bool { 11028 // match: (Const64F [val]) 11029 // cond: 11030 // result: (FMOVDconst [val]) 11031 for { 11032 val := v.AuxInt 11033 v.reset(OpARM64FMOVDconst) 11034 v.AuxInt = val 11035 return true 11036 } 11037 } 11038 func rewriteValueARM64_OpConst8_0(v *Value) bool { 11039 // match: (Const8 [val]) 11040 // cond: 11041 // result: (MOVDconst [val]) 11042 for { 11043 val := v.AuxInt 11044 v.reset(OpARM64MOVDconst) 11045 v.AuxInt = val 11046 return true 11047 } 11048 } 11049 func rewriteValueARM64_OpConstBool_0(v *Value) bool { 11050 // match: (ConstBool [b]) 11051 // cond: 11052 // result: (MOVDconst [b]) 11053 for { 11054 b := v.AuxInt 11055 v.reset(OpARM64MOVDconst) 11056 v.AuxInt = b 11057 return true 11058 } 11059 } 11060 func rewriteValueARM64_OpConstNil_0(v *Value) bool { 11061 // match: (ConstNil) 11062 // cond: 11063 // result: (MOVDconst [0]) 11064 for { 11065 v.reset(OpARM64MOVDconst) 11066 v.AuxInt = 0 11067 return true 11068 } 11069 } 11070 func rewriteValueARM64_OpConvert_0(v *Value) bool { 11071 // match: (Convert x mem) 11072 // cond: 11073 // result: (MOVDconvert x mem) 11074 for { 11075 _ = v.Args[1] 11076 x := v.Args[0] 11077 mem := v.Args[1] 11078 v.reset(OpARM64MOVDconvert) 11079 v.AddArg(x) 11080 v.AddArg(mem) 11081 return true 11082 } 11083 } 11084 func rewriteValueARM64_OpCtz32_0(v *Value) bool { 11085 b := v.Block 11086 _ = b 11087 // match: (Ctz32 <t> x) 11088 // cond: 11089 // result: (CLZW (RBITW <t> x)) 11090 for { 11091 t := v.Type 11092 x := v.Args[0] 11093 v.reset(OpARM64CLZW) 11094 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 11095 v0.AddArg(x) 11096 v.AddArg(v0) 11097 return true 11098 } 11099 } 11100 func rewriteValueARM64_OpCtz64_0(v *Value) bool { 11101 b := v.Block 11102 _ = b 11103 // match: (Ctz64 <t> x) 11104 // cond: 11105 // result: (CLZ (RBIT <t> x)) 11106 for { 11107 t := v.Type 
11108 x := v.Args[0] 11109 v.reset(OpARM64CLZ) 11110 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 11111 v0.AddArg(x) 11112 v.AddArg(v0) 11113 return true 11114 } 11115 } 11116 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool { 11117 // match: (Cvt32Fto32 x) 11118 // cond: 11119 // result: (FCVTZSSW x) 11120 for { 11121 x := v.Args[0] 11122 v.reset(OpARM64FCVTZSSW) 11123 v.AddArg(x) 11124 return true 11125 } 11126 } 11127 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool { 11128 // match: (Cvt32Fto32U x) 11129 // cond: 11130 // result: (FCVTZUSW x) 11131 for { 11132 x := v.Args[0] 11133 v.reset(OpARM64FCVTZUSW) 11134 v.AddArg(x) 11135 return true 11136 } 11137 } 11138 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool { 11139 // match: (Cvt32Fto64 x) 11140 // cond: 11141 // result: (FCVTZSS x) 11142 for { 11143 x := v.Args[0] 11144 v.reset(OpARM64FCVTZSS) 11145 v.AddArg(x) 11146 return true 11147 } 11148 } 11149 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool { 11150 // match: (Cvt32Fto64F x) 11151 // cond: 11152 // result: (FCVTSD x) 11153 for { 11154 x := v.Args[0] 11155 v.reset(OpARM64FCVTSD) 11156 v.AddArg(x) 11157 return true 11158 } 11159 } 11160 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool { 11161 // match: (Cvt32Fto64U x) 11162 // cond: 11163 // result: (FCVTZUS x) 11164 for { 11165 x := v.Args[0] 11166 v.reset(OpARM64FCVTZUS) 11167 v.AddArg(x) 11168 return true 11169 } 11170 } 11171 func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool { 11172 // match: (Cvt32Uto32F x) 11173 // cond: 11174 // result: (UCVTFWS x) 11175 for { 11176 x := v.Args[0] 11177 v.reset(OpARM64UCVTFWS) 11178 v.AddArg(x) 11179 return true 11180 } 11181 } 11182 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool { 11183 // match: (Cvt32Uto64F x) 11184 // cond: 11185 // result: (UCVTFWD x) 11186 for { 11187 x := v.Args[0] 11188 v.reset(OpARM64UCVTFWD) 11189 v.AddArg(x) 11190 return true 11191 } 11192 } 11193 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool { 
11194 // match: (Cvt32to32F x) 11195 // cond: 11196 // result: (SCVTFWS x) 11197 for { 11198 x := v.Args[0] 11199 v.reset(OpARM64SCVTFWS) 11200 v.AddArg(x) 11201 return true 11202 } 11203 } 11204 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool { 11205 // match: (Cvt32to64F x) 11206 // cond: 11207 // result: (SCVTFWD x) 11208 for { 11209 x := v.Args[0] 11210 v.reset(OpARM64SCVTFWD) 11211 v.AddArg(x) 11212 return true 11213 } 11214 } 11215 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool { 11216 // match: (Cvt64Fto32 x) 11217 // cond: 11218 // result: (FCVTZSDW x) 11219 for { 11220 x := v.Args[0] 11221 v.reset(OpARM64FCVTZSDW) 11222 v.AddArg(x) 11223 return true 11224 } 11225 } 11226 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool { 11227 // match: (Cvt64Fto32F x) 11228 // cond: 11229 // result: (FCVTDS x) 11230 for { 11231 x := v.Args[0] 11232 v.reset(OpARM64FCVTDS) 11233 v.AddArg(x) 11234 return true 11235 } 11236 } 11237 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool { 11238 // match: (Cvt64Fto32U x) 11239 // cond: 11240 // result: (FCVTZUDW x) 11241 for { 11242 x := v.Args[0] 11243 v.reset(OpARM64FCVTZUDW) 11244 v.AddArg(x) 11245 return true 11246 } 11247 } 11248 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool { 11249 // match: (Cvt64Fto64 x) 11250 // cond: 11251 // result: (FCVTZSD x) 11252 for { 11253 x := v.Args[0] 11254 v.reset(OpARM64FCVTZSD) 11255 v.AddArg(x) 11256 return true 11257 } 11258 } 11259 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool { 11260 // match: (Cvt64Fto64U x) 11261 // cond: 11262 // result: (FCVTZUD x) 11263 for { 11264 x := v.Args[0] 11265 v.reset(OpARM64FCVTZUD) 11266 v.AddArg(x) 11267 return true 11268 } 11269 } 11270 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool { 11271 // match: (Cvt64Uto32F x) 11272 // cond: 11273 // result: (UCVTFS x) 11274 for { 11275 x := v.Args[0] 11276 v.reset(OpARM64UCVTFS) 11277 v.AddArg(x) 11278 return true 11279 } 11280 } 11281 func 
rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool { 11282 // match: (Cvt64Uto64F x) 11283 // cond: 11284 // result: (UCVTFD x) 11285 for { 11286 x := v.Args[0] 11287 v.reset(OpARM64UCVTFD) 11288 v.AddArg(x) 11289 return true 11290 } 11291 } 11292 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool { 11293 // match: (Cvt64to32F x) 11294 // cond: 11295 // result: (SCVTFS x) 11296 for { 11297 x := v.Args[0] 11298 v.reset(OpARM64SCVTFS) 11299 v.AddArg(x) 11300 return true 11301 } 11302 } 11303 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool { 11304 // match: (Cvt64to64F x) 11305 // cond: 11306 // result: (SCVTFD x) 11307 for { 11308 x := v.Args[0] 11309 v.reset(OpARM64SCVTFD) 11310 v.AddArg(x) 11311 return true 11312 } 11313 } 11314 func rewriteValueARM64_OpDiv16_0(v *Value) bool { 11315 b := v.Block 11316 _ = b 11317 typ := &b.Func.Config.Types 11318 _ = typ 11319 // match: (Div16 x y) 11320 // cond: 11321 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 11322 for { 11323 _ = v.Args[1] 11324 x := v.Args[0] 11325 y := v.Args[1] 11326 v.reset(OpARM64DIVW) 11327 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11328 v0.AddArg(x) 11329 v.AddArg(v0) 11330 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11331 v1.AddArg(y) 11332 v.AddArg(v1) 11333 return true 11334 } 11335 } 11336 func rewriteValueARM64_OpDiv16u_0(v *Value) bool { 11337 b := v.Block 11338 _ = b 11339 typ := &b.Func.Config.Types 11340 _ = typ 11341 // match: (Div16u x y) 11342 // cond: 11343 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 11344 for { 11345 _ = v.Args[1] 11346 x := v.Args[0] 11347 y := v.Args[1] 11348 v.reset(OpARM64UDIVW) 11349 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11350 v0.AddArg(x) 11351 v.AddArg(v0) 11352 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11353 v1.AddArg(y) 11354 v.AddArg(v1) 11355 return true 11356 } 11357 } 11358 func rewriteValueARM64_OpDiv32_0(v *Value) bool { 11359 // match: (Div32 x y) 11360 // cond: 11361 // result: (DIVW 
x y) 11362 for { 11363 _ = v.Args[1] 11364 x := v.Args[0] 11365 y := v.Args[1] 11366 v.reset(OpARM64DIVW) 11367 v.AddArg(x) 11368 v.AddArg(y) 11369 return true 11370 } 11371 } 11372 func rewriteValueARM64_OpDiv32F_0(v *Value) bool { 11373 // match: (Div32F x y) 11374 // cond: 11375 // result: (FDIVS x y) 11376 for { 11377 _ = v.Args[1] 11378 x := v.Args[0] 11379 y := v.Args[1] 11380 v.reset(OpARM64FDIVS) 11381 v.AddArg(x) 11382 v.AddArg(y) 11383 return true 11384 } 11385 } 11386 func rewriteValueARM64_OpDiv32u_0(v *Value) bool { 11387 // match: (Div32u x y) 11388 // cond: 11389 // result: (UDIVW x y) 11390 for { 11391 _ = v.Args[1] 11392 x := v.Args[0] 11393 y := v.Args[1] 11394 v.reset(OpARM64UDIVW) 11395 v.AddArg(x) 11396 v.AddArg(y) 11397 return true 11398 } 11399 } 11400 func rewriteValueARM64_OpDiv64_0(v *Value) bool { 11401 // match: (Div64 x y) 11402 // cond: 11403 // result: (DIV x y) 11404 for { 11405 _ = v.Args[1] 11406 x := v.Args[0] 11407 y := v.Args[1] 11408 v.reset(OpARM64DIV) 11409 v.AddArg(x) 11410 v.AddArg(y) 11411 return true 11412 } 11413 } 11414 func rewriteValueARM64_OpDiv64F_0(v *Value) bool { 11415 // match: (Div64F x y) 11416 // cond: 11417 // result: (FDIVD x y) 11418 for { 11419 _ = v.Args[1] 11420 x := v.Args[0] 11421 y := v.Args[1] 11422 v.reset(OpARM64FDIVD) 11423 v.AddArg(x) 11424 v.AddArg(y) 11425 return true 11426 } 11427 } 11428 func rewriteValueARM64_OpDiv64u_0(v *Value) bool { 11429 // match: (Div64u x y) 11430 // cond: 11431 // result: (UDIV x y) 11432 for { 11433 _ = v.Args[1] 11434 x := v.Args[0] 11435 y := v.Args[1] 11436 v.reset(OpARM64UDIV) 11437 v.AddArg(x) 11438 v.AddArg(y) 11439 return true 11440 } 11441 } 11442 func rewriteValueARM64_OpDiv8_0(v *Value) bool { 11443 b := v.Block 11444 _ = b 11445 typ := &b.Func.Config.Types 11446 _ = typ 11447 // match: (Div8 x y) 11448 // cond: 11449 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 11450 for { 11451 _ = v.Args[1] 11452 x := v.Args[0] 11453 y := v.Args[1] 11454 
v.reset(OpARM64DIVW) 11455 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 11456 v0.AddArg(x) 11457 v.AddArg(v0) 11458 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 11459 v1.AddArg(y) 11460 v.AddArg(v1) 11461 return true 11462 } 11463 } 11464 func rewriteValueARM64_OpDiv8u_0(v *Value) bool { 11465 b := v.Block 11466 _ = b 11467 typ := &b.Func.Config.Types 11468 _ = typ 11469 // match: (Div8u x y) 11470 // cond: 11471 // result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y)) 11472 for { 11473 _ = v.Args[1] 11474 x := v.Args[0] 11475 y := v.Args[1] 11476 v.reset(OpARM64UDIVW) 11477 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 11478 v0.AddArg(x) 11479 v.AddArg(v0) 11480 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 11481 v1.AddArg(y) 11482 v.AddArg(v1) 11483 return true 11484 } 11485 } 11486 func rewriteValueARM64_OpEq16_0(v *Value) bool { 11487 b := v.Block 11488 _ = b 11489 typ := &b.Func.Config.Types 11490 _ = typ 11491 // match: (Eq16 x y) 11492 // cond: 11493 // result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11494 for { 11495 _ = v.Args[1] 11496 x := v.Args[0] 11497 y := v.Args[1] 11498 v.reset(OpARM64Equal) 11499 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11500 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11501 v1.AddArg(x) 11502 v0.AddArg(v1) 11503 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11504 v2.AddArg(y) 11505 v0.AddArg(v2) 11506 v.AddArg(v0) 11507 return true 11508 } 11509 } 11510 func rewriteValueARM64_OpEq32_0(v *Value) bool { 11511 b := v.Block 11512 _ = b 11513 // match: (Eq32 x y) 11514 // cond: 11515 // result: (Equal (CMPW x y)) 11516 for { 11517 _ = v.Args[1] 11518 x := v.Args[0] 11519 y := v.Args[1] 11520 v.reset(OpARM64Equal) 11521 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11522 v0.AddArg(x) 11523 v0.AddArg(y) 11524 v.AddArg(v0) 11525 return true 11526 } 11527 } 11528 func rewriteValueARM64_OpEq32F_0(v *Value) bool { 11529 b := v.Block 11530 _ = b 11531 // match: (Eq32F 
x y) 11532 // cond: 11533 // result: (Equal (FCMPS x y)) 11534 for { 11535 _ = v.Args[1] 11536 x := v.Args[0] 11537 y := v.Args[1] 11538 v.reset(OpARM64Equal) 11539 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 11540 v0.AddArg(x) 11541 v0.AddArg(y) 11542 v.AddArg(v0) 11543 return true 11544 } 11545 } 11546 func rewriteValueARM64_OpEq64_0(v *Value) bool { 11547 b := v.Block 11548 _ = b 11549 // match: (Eq64 x y) 11550 // cond: 11551 // result: (Equal (CMP x y)) 11552 for { 11553 _ = v.Args[1] 11554 x := v.Args[0] 11555 y := v.Args[1] 11556 v.reset(OpARM64Equal) 11557 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 11558 v0.AddArg(x) 11559 v0.AddArg(y) 11560 v.AddArg(v0) 11561 return true 11562 } 11563 } 11564 func rewriteValueARM64_OpEq64F_0(v *Value) bool { 11565 b := v.Block 11566 _ = b 11567 // match: (Eq64F x y) 11568 // cond: 11569 // result: (Equal (FCMPD x y)) 11570 for { 11571 _ = v.Args[1] 11572 x := v.Args[0] 11573 y := v.Args[1] 11574 v.reset(OpARM64Equal) 11575 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 11576 v0.AddArg(x) 11577 v0.AddArg(y) 11578 v.AddArg(v0) 11579 return true 11580 } 11581 } 11582 func rewriteValueARM64_OpEq8_0(v *Value) bool { 11583 b := v.Block 11584 _ = b 11585 typ := &b.Func.Config.Types 11586 _ = typ 11587 // match: (Eq8 x y) 11588 // cond: 11589 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11590 for { 11591 _ = v.Args[1] 11592 x := v.Args[0] 11593 y := v.Args[1] 11594 v.reset(OpARM64Equal) 11595 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11596 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 11597 v1.AddArg(x) 11598 v0.AddArg(v1) 11599 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 11600 v2.AddArg(y) 11601 v0.AddArg(v2) 11602 v.AddArg(v0) 11603 return true 11604 } 11605 } 11606 func rewriteValueARM64_OpEqB_0(v *Value) bool { 11607 b := v.Block 11608 _ = b 11609 typ := &b.Func.Config.Types 11610 _ = typ 11611 // match: (EqB x y) 11612 // cond: 11613 // result: (XOR 
(MOVDconst [1]) (XOR <typ.Bool> x y)) 11614 for { 11615 _ = v.Args[1] 11616 x := v.Args[0] 11617 y := v.Args[1] 11618 v.reset(OpARM64XOR) 11619 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 11620 v0.AuxInt = 1 11621 v.AddArg(v0) 11622 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool) 11623 v1.AddArg(x) 11624 v1.AddArg(y) 11625 v.AddArg(v1) 11626 return true 11627 } 11628 } 11629 func rewriteValueARM64_OpEqPtr_0(v *Value) bool { 11630 b := v.Block 11631 _ = b 11632 // match: (EqPtr x y) 11633 // cond: 11634 // result: (Equal (CMP x y)) 11635 for { 11636 _ = v.Args[1] 11637 x := v.Args[0] 11638 y := v.Args[1] 11639 v.reset(OpARM64Equal) 11640 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 11641 v0.AddArg(x) 11642 v0.AddArg(y) 11643 v.AddArg(v0) 11644 return true 11645 } 11646 } 11647 func rewriteValueARM64_OpGeq16_0(v *Value) bool { 11648 b := v.Block 11649 _ = b 11650 typ := &b.Func.Config.Types 11651 _ = typ 11652 // match: (Geq16 x y) 11653 // cond: 11654 // result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 11655 for { 11656 _ = v.Args[1] 11657 x := v.Args[0] 11658 y := v.Args[1] 11659 v.reset(OpARM64GreaterEqual) 11660 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11661 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11662 v1.AddArg(x) 11663 v0.AddArg(v1) 11664 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11665 v2.AddArg(y) 11666 v0.AddArg(v2) 11667 v.AddArg(v0) 11668 return true 11669 } 11670 } 11671 func rewriteValueARM64_OpGeq16U_0(v *Value) bool { 11672 b := v.Block 11673 _ = b 11674 typ := &b.Func.Config.Types 11675 _ = typ 11676 // match: (Geq16U x y) 11677 // cond: 11678 // result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11679 for { 11680 _ = v.Args[1] 11681 x := v.Args[0] 11682 y := v.Args[1] 11683 v.reset(OpARM64GreaterEqualU) 11684 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11685 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11686 v1.AddArg(x) 11687 v0.AddArg(v1) 
11688 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11689 v2.AddArg(y) 11690 v0.AddArg(v2) 11691 v.AddArg(v0) 11692 return true 11693 } 11694 } 11695 func rewriteValueARM64_OpGeq32_0(v *Value) bool { 11696 b := v.Block 11697 _ = b 11698 // match: (Geq32 x y) 11699 // cond: 11700 // result: (GreaterEqual (CMPW x y)) 11701 for { 11702 _ = v.Args[1] 11703 x := v.Args[0] 11704 y := v.Args[1] 11705 v.reset(OpARM64GreaterEqual) 11706 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11707 v0.AddArg(x) 11708 v0.AddArg(y) 11709 v.AddArg(v0) 11710 return true 11711 } 11712 } 11713 func rewriteValueARM64_OpGeq32F_0(v *Value) bool { 11714 b := v.Block 11715 _ = b 11716 // match: (Geq32F x y) 11717 // cond: 11718 // result: (GreaterEqual (FCMPS x y)) 11719 for { 11720 _ = v.Args[1] 11721 x := v.Args[0] 11722 y := v.Args[1] 11723 v.reset(OpARM64GreaterEqual) 11724 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 11725 v0.AddArg(x) 11726 v0.AddArg(y) 11727 v.AddArg(v0) 11728 return true 11729 } 11730 } 11731 func rewriteValueARM64_OpGeq32U_0(v *Value) bool { 11732 b := v.Block 11733 _ = b 11734 // match: (Geq32U x y) 11735 // cond: 11736 // result: (GreaterEqualU (CMPW x y)) 11737 for { 11738 _ = v.Args[1] 11739 x := v.Args[0] 11740 y := v.Args[1] 11741 v.reset(OpARM64GreaterEqualU) 11742 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11743 v0.AddArg(x) 11744 v0.AddArg(y) 11745 v.AddArg(v0) 11746 return true 11747 } 11748 } 11749 func rewriteValueARM64_OpGeq64_0(v *Value) bool { 11750 b := v.Block 11751 _ = b 11752 // match: (Geq64 x y) 11753 // cond: 11754 // result: (GreaterEqual (CMP x y)) 11755 for { 11756 _ = v.Args[1] 11757 x := v.Args[0] 11758 y := v.Args[1] 11759 v.reset(OpARM64GreaterEqual) 11760 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 11761 v0.AddArg(x) 11762 v0.AddArg(y) 11763 v.AddArg(v0) 11764 return true 11765 } 11766 } 11767 func rewriteValueARM64_OpGeq64F_0(v *Value) bool { 11768 b := v.Block 11769 _ = b 11770 // match: 
(Geq64F x y) 11771 // cond: 11772 // result: (GreaterEqual (FCMPD x y)) 11773 for { 11774 _ = v.Args[1] 11775 x := v.Args[0] 11776 y := v.Args[1] 11777 v.reset(OpARM64GreaterEqual) 11778 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 11779 v0.AddArg(x) 11780 v0.AddArg(y) 11781 v.AddArg(v0) 11782 return true 11783 } 11784 } 11785 func rewriteValueARM64_OpGeq64U_0(v *Value) bool { 11786 b := v.Block 11787 _ = b 11788 // match: (Geq64U x y) 11789 // cond: 11790 // result: (GreaterEqualU (CMP x y)) 11791 for { 11792 _ = v.Args[1] 11793 x := v.Args[0] 11794 y := v.Args[1] 11795 v.reset(OpARM64GreaterEqualU) 11796 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 11797 v0.AddArg(x) 11798 v0.AddArg(y) 11799 v.AddArg(v0) 11800 return true 11801 } 11802 } 11803 func rewriteValueARM64_OpGeq8_0(v *Value) bool { 11804 b := v.Block 11805 _ = b 11806 typ := &b.Func.Config.Types 11807 _ = typ 11808 // match: (Geq8 x y) 11809 // cond: 11810 // result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 11811 for { 11812 _ = v.Args[1] 11813 x := v.Args[0] 11814 y := v.Args[1] 11815 v.reset(OpARM64GreaterEqual) 11816 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11817 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 11818 v1.AddArg(x) 11819 v0.AddArg(v1) 11820 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 11821 v2.AddArg(y) 11822 v0.AddArg(v2) 11823 v.AddArg(v0) 11824 return true 11825 } 11826 } 11827 func rewriteValueARM64_OpGeq8U_0(v *Value) bool { 11828 b := v.Block 11829 _ = b 11830 typ := &b.Func.Config.Types 11831 _ = typ 11832 // match: (Geq8U x y) 11833 // cond: 11834 // result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 11835 for { 11836 _ = v.Args[1] 11837 x := v.Args[0] 11838 y := v.Args[1] 11839 v.reset(OpARM64GreaterEqualU) 11840 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11841 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 11842 v1.AddArg(x) 11843 v0.AddArg(v1) 11844 v2 := b.NewValue0(v.Pos, 
OpZeroExt8to32, typ.UInt32) 11845 v2.AddArg(y) 11846 v0.AddArg(v2) 11847 v.AddArg(v0) 11848 return true 11849 } 11850 } 11851 func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool { 11852 // match: (GetCallerSP) 11853 // cond: 11854 // result: (LoweredGetCallerSP) 11855 for { 11856 v.reset(OpARM64LoweredGetCallerSP) 11857 return true 11858 } 11859 } 11860 func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool { 11861 // match: (GetClosurePtr) 11862 // cond: 11863 // result: (LoweredGetClosurePtr) 11864 for { 11865 v.reset(OpARM64LoweredGetClosurePtr) 11866 return true 11867 } 11868 } 11869 func rewriteValueARM64_OpGreater16_0(v *Value) bool { 11870 b := v.Block 11871 _ = b 11872 typ := &b.Func.Config.Types 11873 _ = typ 11874 // match: (Greater16 x y) 11875 // cond: 11876 // result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 11877 for { 11878 _ = v.Args[1] 11879 x := v.Args[0] 11880 y := v.Args[1] 11881 v.reset(OpARM64GreaterThan) 11882 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11883 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11884 v1.AddArg(x) 11885 v0.AddArg(v1) 11886 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 11887 v2.AddArg(y) 11888 v0.AddArg(v2) 11889 v.AddArg(v0) 11890 return true 11891 } 11892 } 11893 func rewriteValueARM64_OpGreater16U_0(v *Value) bool { 11894 b := v.Block 11895 _ = b 11896 typ := &b.Func.Config.Types 11897 _ = typ 11898 // match: (Greater16U x y) 11899 // cond: 11900 // result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 11901 for { 11902 _ = v.Args[1] 11903 x := v.Args[0] 11904 y := v.Args[1] 11905 v.reset(OpARM64GreaterThanU) 11906 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11907 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11908 v1.AddArg(x) 11909 v0.AddArg(v1) 11910 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 11911 v2.AddArg(y) 11912 v0.AddArg(v2) 11913 v.AddArg(v0) 11914 return true 11915 } 11916 } 11917 func rewriteValueARM64_OpGreater32_0(v 
*Value) bool { 11918 b := v.Block 11919 _ = b 11920 // match: (Greater32 x y) 11921 // cond: 11922 // result: (GreaterThan (CMPW x y)) 11923 for { 11924 _ = v.Args[1] 11925 x := v.Args[0] 11926 y := v.Args[1] 11927 v.reset(OpARM64GreaterThan) 11928 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11929 v0.AddArg(x) 11930 v0.AddArg(y) 11931 v.AddArg(v0) 11932 return true 11933 } 11934 } 11935 func rewriteValueARM64_OpGreater32F_0(v *Value) bool { 11936 b := v.Block 11937 _ = b 11938 // match: (Greater32F x y) 11939 // cond: 11940 // result: (GreaterThan (FCMPS x y)) 11941 for { 11942 _ = v.Args[1] 11943 x := v.Args[0] 11944 y := v.Args[1] 11945 v.reset(OpARM64GreaterThan) 11946 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 11947 v0.AddArg(x) 11948 v0.AddArg(y) 11949 v.AddArg(v0) 11950 return true 11951 } 11952 } 11953 func rewriteValueARM64_OpGreater32U_0(v *Value) bool { 11954 b := v.Block 11955 _ = b 11956 // match: (Greater32U x y) 11957 // cond: 11958 // result: (GreaterThanU (CMPW x y)) 11959 for { 11960 _ = v.Args[1] 11961 x := v.Args[0] 11962 y := v.Args[1] 11963 v.reset(OpARM64GreaterThanU) 11964 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 11965 v0.AddArg(x) 11966 v0.AddArg(y) 11967 v.AddArg(v0) 11968 return true 11969 } 11970 } 11971 func rewriteValueARM64_OpGreater64_0(v *Value) bool { 11972 b := v.Block 11973 _ = b 11974 // match: (Greater64 x y) 11975 // cond: 11976 // result: (GreaterThan (CMP x y)) 11977 for { 11978 _ = v.Args[1] 11979 x := v.Args[0] 11980 y := v.Args[1] 11981 v.reset(OpARM64GreaterThan) 11982 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 11983 v0.AddArg(x) 11984 v0.AddArg(y) 11985 v.AddArg(v0) 11986 return true 11987 } 11988 } 11989 func rewriteValueARM64_OpGreater64F_0(v *Value) bool { 11990 b := v.Block 11991 _ = b 11992 // match: (Greater64F x y) 11993 // cond: 11994 // result: (GreaterThan (FCMPD x y)) 11995 for { 11996 _ = v.Args[1] 11997 x := v.Args[0] 11998 y := v.Args[1] 11999 
v.reset(OpARM64GreaterThan) 12000 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 12001 v0.AddArg(x) 12002 v0.AddArg(y) 12003 v.AddArg(v0) 12004 return true 12005 } 12006 } 12007 func rewriteValueARM64_OpGreater64U_0(v *Value) bool { 12008 b := v.Block 12009 _ = b 12010 // match: (Greater64U x y) 12011 // cond: 12012 // result: (GreaterThanU (CMP x y)) 12013 for { 12014 _ = v.Args[1] 12015 x := v.Args[0] 12016 y := v.Args[1] 12017 v.reset(OpARM64GreaterThanU) 12018 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12019 v0.AddArg(x) 12020 v0.AddArg(y) 12021 v.AddArg(v0) 12022 return true 12023 } 12024 } 12025 func rewriteValueARM64_OpGreater8_0(v *Value) bool { 12026 b := v.Block 12027 _ = b 12028 typ := &b.Func.Config.Types 12029 _ = typ 12030 // match: (Greater8 x y) 12031 // cond: 12032 // result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 12033 for { 12034 _ = v.Args[1] 12035 x := v.Args[0] 12036 y := v.Args[1] 12037 v.reset(OpARM64GreaterThan) 12038 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12039 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12040 v1.AddArg(x) 12041 v0.AddArg(v1) 12042 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12043 v2.AddArg(y) 12044 v0.AddArg(v2) 12045 v.AddArg(v0) 12046 return true 12047 } 12048 } 12049 func rewriteValueARM64_OpGreater8U_0(v *Value) bool { 12050 b := v.Block 12051 _ = b 12052 typ := &b.Func.Config.Types 12053 _ = typ 12054 // match: (Greater8U x y) 12055 // cond: 12056 // result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 12057 for { 12058 _ = v.Args[1] 12059 x := v.Args[0] 12060 y := v.Args[1] 12061 v.reset(OpARM64GreaterThanU) 12062 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12063 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12064 v1.AddArg(x) 12065 v0.AddArg(v1) 12066 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12067 v2.AddArg(y) 12068 v0.AddArg(v2) 12069 v.AddArg(v0) 12070 return true 12071 } 12072 } 12073 func 
rewriteValueARM64_OpHmul32_0(v *Value) bool { 12074 b := v.Block 12075 _ = b 12076 typ := &b.Func.Config.Types 12077 _ = typ 12078 // match: (Hmul32 x y) 12079 // cond: 12080 // result: (SRAconst (MULL <typ.Int64> x y) [32]) 12081 for { 12082 _ = v.Args[1] 12083 x := v.Args[0] 12084 y := v.Args[1] 12085 v.reset(OpARM64SRAconst) 12086 v.AuxInt = 32 12087 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64) 12088 v0.AddArg(x) 12089 v0.AddArg(y) 12090 v.AddArg(v0) 12091 return true 12092 } 12093 } 12094 func rewriteValueARM64_OpHmul32u_0(v *Value) bool { 12095 b := v.Block 12096 _ = b 12097 typ := &b.Func.Config.Types 12098 _ = typ 12099 // match: (Hmul32u x y) 12100 // cond: 12101 // result: (SRAconst (UMULL <typ.UInt64> x y) [32]) 12102 for { 12103 _ = v.Args[1] 12104 x := v.Args[0] 12105 y := v.Args[1] 12106 v.reset(OpARM64SRAconst) 12107 v.AuxInt = 32 12108 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64) 12109 v0.AddArg(x) 12110 v0.AddArg(y) 12111 v.AddArg(v0) 12112 return true 12113 } 12114 } 12115 func rewriteValueARM64_OpHmul64_0(v *Value) bool { 12116 // match: (Hmul64 x y) 12117 // cond: 12118 // result: (MULH x y) 12119 for { 12120 _ = v.Args[1] 12121 x := v.Args[0] 12122 y := v.Args[1] 12123 v.reset(OpARM64MULH) 12124 v.AddArg(x) 12125 v.AddArg(y) 12126 return true 12127 } 12128 } 12129 func rewriteValueARM64_OpHmul64u_0(v *Value) bool { 12130 // match: (Hmul64u x y) 12131 // cond: 12132 // result: (UMULH x y) 12133 for { 12134 _ = v.Args[1] 12135 x := v.Args[0] 12136 y := v.Args[1] 12137 v.reset(OpARM64UMULH) 12138 v.AddArg(x) 12139 v.AddArg(y) 12140 return true 12141 } 12142 } 12143 func rewriteValueARM64_OpInterCall_0(v *Value) bool { 12144 // match: (InterCall [argwid] entry mem) 12145 // cond: 12146 // result: (CALLinter [argwid] entry mem) 12147 for { 12148 argwid := v.AuxInt 12149 _ = v.Args[1] 12150 entry := v.Args[0] 12151 mem := v.Args[1] 12152 v.reset(OpARM64CALLinter) 12153 v.AuxInt = argwid 12154 v.AddArg(entry) 12155 v.AddArg(mem) 12156 return 
true 12157 } 12158 } 12159 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool { 12160 b := v.Block 12161 _ = b 12162 // match: (IsInBounds idx len) 12163 // cond: 12164 // result: (LessThanU (CMP idx len)) 12165 for { 12166 _ = v.Args[1] 12167 idx := v.Args[0] 12168 len := v.Args[1] 12169 v.reset(OpARM64LessThanU) 12170 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12171 v0.AddArg(idx) 12172 v0.AddArg(len) 12173 v.AddArg(v0) 12174 return true 12175 } 12176 } 12177 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool { 12178 b := v.Block 12179 _ = b 12180 // match: (IsNonNil ptr) 12181 // cond: 12182 // result: (NotEqual (CMPconst [0] ptr)) 12183 for { 12184 ptr := v.Args[0] 12185 v.reset(OpARM64NotEqual) 12186 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12187 v0.AuxInt = 0 12188 v0.AddArg(ptr) 12189 v.AddArg(v0) 12190 return true 12191 } 12192 } 12193 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool { 12194 b := v.Block 12195 _ = b 12196 // match: (IsSliceInBounds idx len) 12197 // cond: 12198 // result: (LessEqualU (CMP idx len)) 12199 for { 12200 _ = v.Args[1] 12201 idx := v.Args[0] 12202 len := v.Args[1] 12203 v.reset(OpARM64LessEqualU) 12204 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12205 v0.AddArg(idx) 12206 v0.AddArg(len) 12207 v.AddArg(v0) 12208 return true 12209 } 12210 } 12211 func rewriteValueARM64_OpLeq16_0(v *Value) bool { 12212 b := v.Block 12213 _ = b 12214 typ := &b.Func.Config.Types 12215 _ = typ 12216 // match: (Leq16 x y) 12217 // cond: 12218 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 12219 for { 12220 _ = v.Args[1] 12221 x := v.Args[0] 12222 y := v.Args[1] 12223 v.reset(OpARM64LessEqual) 12224 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12225 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 12226 v1.AddArg(x) 12227 v0.AddArg(v1) 12228 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 12229 v2.AddArg(y) 12230 v0.AddArg(v2) 12231 v.AddArg(v0) 12232 return true 
12233 } 12234 } 12235 func rewriteValueARM64_OpLeq16U_0(v *Value) bool { 12236 b := v.Block 12237 _ = b 12238 typ := &b.Func.Config.Types 12239 _ = typ 12240 // match: (Leq16U x y) 12241 // cond: 12242 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 12243 for { 12244 _ = v.Args[1] 12245 x := v.Args[0] 12246 y := v.Args[1] 12247 v.reset(OpARM64LessEqualU) 12248 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12249 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 12250 v1.AddArg(x) 12251 v0.AddArg(v1) 12252 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 12253 v2.AddArg(y) 12254 v0.AddArg(v2) 12255 v.AddArg(v0) 12256 return true 12257 } 12258 } 12259 func rewriteValueARM64_OpLeq32_0(v *Value) bool { 12260 b := v.Block 12261 _ = b 12262 // match: (Leq32 x y) 12263 // cond: 12264 // result: (LessEqual (CMPW x y)) 12265 for { 12266 _ = v.Args[1] 12267 x := v.Args[0] 12268 y := v.Args[1] 12269 v.reset(OpARM64LessEqual) 12270 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12271 v0.AddArg(x) 12272 v0.AddArg(y) 12273 v.AddArg(v0) 12274 return true 12275 } 12276 } 12277 func rewriteValueARM64_OpLeq32F_0(v *Value) bool { 12278 b := v.Block 12279 _ = b 12280 // match: (Leq32F x y) 12281 // cond: 12282 // result: (GreaterEqual (FCMPS y x)) 12283 for { 12284 _ = v.Args[1] 12285 x := v.Args[0] 12286 y := v.Args[1] 12287 v.reset(OpARM64GreaterEqual) 12288 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 12289 v0.AddArg(y) 12290 v0.AddArg(x) 12291 v.AddArg(v0) 12292 return true 12293 } 12294 } 12295 func rewriteValueARM64_OpLeq32U_0(v *Value) bool { 12296 b := v.Block 12297 _ = b 12298 // match: (Leq32U x y) 12299 // cond: 12300 // result: (LessEqualU (CMPW x y)) 12301 for { 12302 _ = v.Args[1] 12303 x := v.Args[0] 12304 y := v.Args[1] 12305 v.reset(OpARM64LessEqualU) 12306 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12307 v0.AddArg(x) 12308 v0.AddArg(y) 12309 v.AddArg(v0) 12310 return true 12311 } 12312 } 12313 func 
rewriteValueARM64_OpLeq64_0(v *Value) bool { 12314 b := v.Block 12315 _ = b 12316 // match: (Leq64 x y) 12317 // cond: 12318 // result: (LessEqual (CMP x y)) 12319 for { 12320 _ = v.Args[1] 12321 x := v.Args[0] 12322 y := v.Args[1] 12323 v.reset(OpARM64LessEqual) 12324 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12325 v0.AddArg(x) 12326 v0.AddArg(y) 12327 v.AddArg(v0) 12328 return true 12329 } 12330 } 12331 func rewriteValueARM64_OpLeq64F_0(v *Value) bool { 12332 b := v.Block 12333 _ = b 12334 // match: (Leq64F x y) 12335 // cond: 12336 // result: (GreaterEqual (FCMPD y x)) 12337 for { 12338 _ = v.Args[1] 12339 x := v.Args[0] 12340 y := v.Args[1] 12341 v.reset(OpARM64GreaterEqual) 12342 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 12343 v0.AddArg(y) 12344 v0.AddArg(x) 12345 v.AddArg(v0) 12346 return true 12347 } 12348 } 12349 func rewriteValueARM64_OpLeq64U_0(v *Value) bool { 12350 b := v.Block 12351 _ = b 12352 // match: (Leq64U x y) 12353 // cond: 12354 // result: (LessEqualU (CMP x y)) 12355 for { 12356 _ = v.Args[1] 12357 x := v.Args[0] 12358 y := v.Args[1] 12359 v.reset(OpARM64LessEqualU) 12360 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12361 v0.AddArg(x) 12362 v0.AddArg(y) 12363 v.AddArg(v0) 12364 return true 12365 } 12366 } 12367 func rewriteValueARM64_OpLeq8_0(v *Value) bool { 12368 b := v.Block 12369 _ = b 12370 typ := &b.Func.Config.Types 12371 _ = typ 12372 // match: (Leq8 x y) 12373 // cond: 12374 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 12375 for { 12376 _ = v.Args[1] 12377 x := v.Args[0] 12378 y := v.Args[1] 12379 v.reset(OpARM64LessEqual) 12380 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12381 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12382 v1.AddArg(x) 12383 v0.AddArg(v1) 12384 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12385 v2.AddArg(y) 12386 v0.AddArg(v2) 12387 v.AddArg(v0) 12388 return true 12389 } 12390 } 12391 func rewriteValueARM64_OpLeq8U_0(v *Value) bool { 
12392 b := v.Block 12393 _ = b 12394 typ := &b.Func.Config.Types 12395 _ = typ 12396 // match: (Leq8U x y) 12397 // cond: 12398 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 12399 for { 12400 _ = v.Args[1] 12401 x := v.Args[0] 12402 y := v.Args[1] 12403 v.reset(OpARM64LessEqualU) 12404 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12405 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12406 v1.AddArg(x) 12407 v0.AddArg(v1) 12408 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12409 v2.AddArg(y) 12410 v0.AddArg(v2) 12411 v.AddArg(v0) 12412 return true 12413 } 12414 } 12415 func rewriteValueARM64_OpLess16_0(v *Value) bool { 12416 b := v.Block 12417 _ = b 12418 typ := &b.Func.Config.Types 12419 _ = typ 12420 // match: (Less16 x y) 12421 // cond: 12422 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 12423 for { 12424 _ = v.Args[1] 12425 x := v.Args[0] 12426 y := v.Args[1] 12427 v.reset(OpARM64LessThan) 12428 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12429 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 12430 v1.AddArg(x) 12431 v0.AddArg(v1) 12432 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 12433 v2.AddArg(y) 12434 v0.AddArg(v2) 12435 v.AddArg(v0) 12436 return true 12437 } 12438 } 12439 func rewriteValueARM64_OpLess16U_0(v *Value) bool { 12440 b := v.Block 12441 _ = b 12442 typ := &b.Func.Config.Types 12443 _ = typ 12444 // match: (Less16U x y) 12445 // cond: 12446 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 12447 for { 12448 _ = v.Args[1] 12449 x := v.Args[0] 12450 y := v.Args[1] 12451 v.reset(OpARM64LessThanU) 12452 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12453 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 12454 v1.AddArg(x) 12455 v0.AddArg(v1) 12456 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 12457 v2.AddArg(y) 12458 v0.AddArg(v2) 12459 v.AddArg(v0) 12460 return true 12461 } 12462 } 12463 func rewriteValueARM64_OpLess32_0(v *Value) 
bool { 12464 b := v.Block 12465 _ = b 12466 // match: (Less32 x y) 12467 // cond: 12468 // result: (LessThan (CMPW x y)) 12469 for { 12470 _ = v.Args[1] 12471 x := v.Args[0] 12472 y := v.Args[1] 12473 v.reset(OpARM64LessThan) 12474 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12475 v0.AddArg(x) 12476 v0.AddArg(y) 12477 v.AddArg(v0) 12478 return true 12479 } 12480 } 12481 func rewriteValueARM64_OpLess32F_0(v *Value) bool { 12482 b := v.Block 12483 _ = b 12484 // match: (Less32F x y) 12485 // cond: 12486 // result: (GreaterThan (FCMPS y x)) 12487 for { 12488 _ = v.Args[1] 12489 x := v.Args[0] 12490 y := v.Args[1] 12491 v.reset(OpARM64GreaterThan) 12492 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 12493 v0.AddArg(y) 12494 v0.AddArg(x) 12495 v.AddArg(v0) 12496 return true 12497 } 12498 } 12499 func rewriteValueARM64_OpLess32U_0(v *Value) bool { 12500 b := v.Block 12501 _ = b 12502 // match: (Less32U x y) 12503 // cond: 12504 // result: (LessThanU (CMPW x y)) 12505 for { 12506 _ = v.Args[1] 12507 x := v.Args[0] 12508 y := v.Args[1] 12509 v.reset(OpARM64LessThanU) 12510 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12511 v0.AddArg(x) 12512 v0.AddArg(y) 12513 v.AddArg(v0) 12514 return true 12515 } 12516 } 12517 func rewriteValueARM64_OpLess64_0(v *Value) bool { 12518 b := v.Block 12519 _ = b 12520 // match: (Less64 x y) 12521 // cond: 12522 // result: (LessThan (CMP x y)) 12523 for { 12524 _ = v.Args[1] 12525 x := v.Args[0] 12526 y := v.Args[1] 12527 v.reset(OpARM64LessThan) 12528 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12529 v0.AddArg(x) 12530 v0.AddArg(y) 12531 v.AddArg(v0) 12532 return true 12533 } 12534 } 12535 func rewriteValueARM64_OpLess64F_0(v *Value) bool { 12536 b := v.Block 12537 _ = b 12538 // match: (Less64F x y) 12539 // cond: 12540 // result: (GreaterThan (FCMPD y x)) 12541 for { 12542 _ = v.Args[1] 12543 x := v.Args[0] 12544 y := v.Args[1] 12545 v.reset(OpARM64GreaterThan) 12546 v0 := b.NewValue0(v.Pos, 
OpARM64FCMPD, types.TypeFlags) 12547 v0.AddArg(y) 12548 v0.AddArg(x) 12549 v.AddArg(v0) 12550 return true 12551 } 12552 } 12553 func rewriteValueARM64_OpLess64U_0(v *Value) bool { 12554 b := v.Block 12555 _ = b 12556 // match: (Less64U x y) 12557 // cond: 12558 // result: (LessThanU (CMP x y)) 12559 for { 12560 _ = v.Args[1] 12561 x := v.Args[0] 12562 y := v.Args[1] 12563 v.reset(OpARM64LessThanU) 12564 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 12565 v0.AddArg(x) 12566 v0.AddArg(y) 12567 v.AddArg(v0) 12568 return true 12569 } 12570 } 12571 func rewriteValueARM64_OpLess8_0(v *Value) bool { 12572 b := v.Block 12573 _ = b 12574 typ := &b.Func.Config.Types 12575 _ = typ 12576 // match: (Less8 x y) 12577 // cond: 12578 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 12579 for { 12580 _ = v.Args[1] 12581 x := v.Args[0] 12582 y := v.Args[1] 12583 v.reset(OpARM64LessThan) 12584 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12585 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12586 v1.AddArg(x) 12587 v0.AddArg(v1) 12588 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 12589 v2.AddArg(y) 12590 v0.AddArg(v2) 12591 v.AddArg(v0) 12592 return true 12593 } 12594 } 12595 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 12596 b := v.Block 12597 _ = b 12598 typ := &b.Func.Config.Types 12599 _ = typ 12600 // match: (Less8U x y) 12601 // cond: 12602 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 12603 for { 12604 _ = v.Args[1] 12605 x := v.Args[0] 12606 y := v.Args[1] 12607 v.reset(OpARM64LessThanU) 12608 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 12609 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12610 v1.AddArg(x) 12611 v0.AddArg(v1) 12612 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 12613 v2.AddArg(y) 12614 v0.AddArg(v2) 12615 v.AddArg(v0) 12616 return true 12617 } 12618 } 12619 func rewriteValueARM64_OpLoad_0(v *Value) bool { 12620 // match: (Load <t> ptr mem) 12621 // cond: t.IsBoolean() 
12622 // result: (MOVBUload ptr mem) 12623 for { 12624 t := v.Type 12625 _ = v.Args[1] 12626 ptr := v.Args[0] 12627 mem := v.Args[1] 12628 if !(t.IsBoolean()) { 12629 break 12630 } 12631 v.reset(OpARM64MOVBUload) 12632 v.AddArg(ptr) 12633 v.AddArg(mem) 12634 return true 12635 } 12636 // match: (Load <t> ptr mem) 12637 // cond: (is8BitInt(t) && isSigned(t)) 12638 // result: (MOVBload ptr mem) 12639 for { 12640 t := v.Type 12641 _ = v.Args[1] 12642 ptr := v.Args[0] 12643 mem := v.Args[1] 12644 if !(is8BitInt(t) && isSigned(t)) { 12645 break 12646 } 12647 v.reset(OpARM64MOVBload) 12648 v.AddArg(ptr) 12649 v.AddArg(mem) 12650 return true 12651 } 12652 // match: (Load <t> ptr mem) 12653 // cond: (is8BitInt(t) && !isSigned(t)) 12654 // result: (MOVBUload ptr mem) 12655 for { 12656 t := v.Type 12657 _ = v.Args[1] 12658 ptr := v.Args[0] 12659 mem := v.Args[1] 12660 if !(is8BitInt(t) && !isSigned(t)) { 12661 break 12662 } 12663 v.reset(OpARM64MOVBUload) 12664 v.AddArg(ptr) 12665 v.AddArg(mem) 12666 return true 12667 } 12668 // match: (Load <t> ptr mem) 12669 // cond: (is16BitInt(t) && isSigned(t)) 12670 // result: (MOVHload ptr mem) 12671 for { 12672 t := v.Type 12673 _ = v.Args[1] 12674 ptr := v.Args[0] 12675 mem := v.Args[1] 12676 if !(is16BitInt(t) && isSigned(t)) { 12677 break 12678 } 12679 v.reset(OpARM64MOVHload) 12680 v.AddArg(ptr) 12681 v.AddArg(mem) 12682 return true 12683 } 12684 // match: (Load <t> ptr mem) 12685 // cond: (is16BitInt(t) && !isSigned(t)) 12686 // result: (MOVHUload ptr mem) 12687 for { 12688 t := v.Type 12689 _ = v.Args[1] 12690 ptr := v.Args[0] 12691 mem := v.Args[1] 12692 if !(is16BitInt(t) && !isSigned(t)) { 12693 break 12694 } 12695 v.reset(OpARM64MOVHUload) 12696 v.AddArg(ptr) 12697 v.AddArg(mem) 12698 return true 12699 } 12700 // match: (Load <t> ptr mem) 12701 // cond: (is32BitInt(t) && isSigned(t)) 12702 // result: (MOVWload ptr mem) 12703 for { 12704 t := v.Type 12705 _ = v.Args[1] 12706 ptr := v.Args[0] 12707 mem := v.Args[1] 12708 if 
!(is32BitInt(t) && isSigned(t)) { 12709 break 12710 } 12711 v.reset(OpARM64MOVWload) 12712 v.AddArg(ptr) 12713 v.AddArg(mem) 12714 return true 12715 } 12716 // match: (Load <t> ptr mem) 12717 // cond: (is32BitInt(t) && !isSigned(t)) 12718 // result: (MOVWUload ptr mem) 12719 for { 12720 t := v.Type 12721 _ = v.Args[1] 12722 ptr := v.Args[0] 12723 mem := v.Args[1] 12724 if !(is32BitInt(t) && !isSigned(t)) { 12725 break 12726 } 12727 v.reset(OpARM64MOVWUload) 12728 v.AddArg(ptr) 12729 v.AddArg(mem) 12730 return true 12731 } 12732 // match: (Load <t> ptr mem) 12733 // cond: (is64BitInt(t) || isPtr(t)) 12734 // result: (MOVDload ptr mem) 12735 for { 12736 t := v.Type 12737 _ = v.Args[1] 12738 ptr := v.Args[0] 12739 mem := v.Args[1] 12740 if !(is64BitInt(t) || isPtr(t)) { 12741 break 12742 } 12743 v.reset(OpARM64MOVDload) 12744 v.AddArg(ptr) 12745 v.AddArg(mem) 12746 return true 12747 } 12748 // match: (Load <t> ptr mem) 12749 // cond: is32BitFloat(t) 12750 // result: (FMOVSload ptr mem) 12751 for { 12752 t := v.Type 12753 _ = v.Args[1] 12754 ptr := v.Args[0] 12755 mem := v.Args[1] 12756 if !(is32BitFloat(t)) { 12757 break 12758 } 12759 v.reset(OpARM64FMOVSload) 12760 v.AddArg(ptr) 12761 v.AddArg(mem) 12762 return true 12763 } 12764 // match: (Load <t> ptr mem) 12765 // cond: is64BitFloat(t) 12766 // result: (FMOVDload ptr mem) 12767 for { 12768 t := v.Type 12769 _ = v.Args[1] 12770 ptr := v.Args[0] 12771 mem := v.Args[1] 12772 if !(is64BitFloat(t)) { 12773 break 12774 } 12775 v.reset(OpARM64FMOVDload) 12776 v.AddArg(ptr) 12777 v.AddArg(mem) 12778 return true 12779 } 12780 return false 12781 } 12782 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 12783 b := v.Block 12784 _ = b 12785 typ := &b.Func.Config.Types 12786 _ = typ 12787 // match: (Lsh16x16 <t> x y) 12788 // cond: 12789 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12790 for { 12791 t := v.Type 12792 _ = v.Args[1] 12793 x := v.Args[0] 12794 y := 
v.Args[1] 12795 v.reset(OpARM64CSELULT) 12796 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12797 v0.AddArg(x) 12798 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12799 v1.AddArg(y) 12800 v0.AddArg(v1) 12801 v.AddArg(v0) 12802 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12803 v2.AuxInt = 0 12804 v.AddArg(v2) 12805 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12806 v3.AuxInt = 64 12807 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12808 v4.AddArg(y) 12809 v3.AddArg(v4) 12810 v.AddArg(v3) 12811 return true 12812 } 12813 } 12814 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 12815 b := v.Block 12816 _ = b 12817 typ := &b.Func.Config.Types 12818 _ = typ 12819 // match: (Lsh16x32 <t> x y) 12820 // cond: 12821 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12822 for { 12823 t := v.Type 12824 _ = v.Args[1] 12825 x := v.Args[0] 12826 y := v.Args[1] 12827 v.reset(OpARM64CSELULT) 12828 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12829 v0.AddArg(x) 12830 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12831 v1.AddArg(y) 12832 v0.AddArg(v1) 12833 v.AddArg(v0) 12834 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12835 v2.AuxInt = 0 12836 v.AddArg(v2) 12837 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12838 v3.AuxInt = 64 12839 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12840 v4.AddArg(y) 12841 v3.AddArg(v4) 12842 v.AddArg(v3) 12843 return true 12844 } 12845 } 12846 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 12847 b := v.Block 12848 _ = b 12849 // match: (Lsh16x64 <t> x y) 12850 // cond: 12851 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 12852 for { 12853 t := v.Type 12854 _ = v.Args[1] 12855 x := v.Args[0] 12856 y := v.Args[1] 12857 v.reset(OpARM64CSELULT) 12858 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12859 v0.AddArg(x) 12860 v0.AddArg(y) 12861 v.AddArg(v0) 12862 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12863 v1.AuxInt = 
0 12864 v.AddArg(v1) 12865 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12866 v2.AuxInt = 64 12867 v2.AddArg(y) 12868 v.AddArg(v2) 12869 return true 12870 } 12871 } 12872 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 12873 b := v.Block 12874 _ = b 12875 typ := &b.Func.Config.Types 12876 _ = typ 12877 // match: (Lsh16x8 <t> x y) 12878 // cond: 12879 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12880 for { 12881 t := v.Type 12882 _ = v.Args[1] 12883 x := v.Args[0] 12884 y := v.Args[1] 12885 v.reset(OpARM64CSELULT) 12886 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12887 v0.AddArg(x) 12888 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 12889 v1.AddArg(y) 12890 v0.AddArg(v1) 12891 v.AddArg(v0) 12892 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12893 v2.AuxInt = 0 12894 v.AddArg(v2) 12895 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12896 v3.AuxInt = 64 12897 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 12898 v4.AddArg(y) 12899 v3.AddArg(v4) 12900 v.AddArg(v3) 12901 return true 12902 } 12903 } 12904 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 12905 b := v.Block 12906 _ = b 12907 typ := &b.Func.Config.Types 12908 _ = typ 12909 // match: (Lsh32x16 <t> x y) 12910 // cond: 12911 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12912 for { 12913 t := v.Type 12914 _ = v.Args[1] 12915 x := v.Args[0] 12916 y := v.Args[1] 12917 v.reset(OpARM64CSELULT) 12918 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12919 v0.AddArg(x) 12920 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12921 v1.AddArg(y) 12922 v0.AddArg(v1) 12923 v.AddArg(v0) 12924 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12925 v2.AuxInt = 0 12926 v.AddArg(v2) 12927 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12928 v3.AuxInt = 64 12929 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12930 v4.AddArg(y) 12931 v3.AddArg(v4) 12932 
v.AddArg(v3) 12933 return true 12934 } 12935 } 12936 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 12937 b := v.Block 12938 _ = b 12939 typ := &b.Func.Config.Types 12940 _ = typ 12941 // match: (Lsh32x32 <t> x y) 12942 // cond: 12943 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12944 for { 12945 t := v.Type 12946 _ = v.Args[1] 12947 x := v.Args[0] 12948 y := v.Args[1] 12949 v.reset(OpARM64CSELULT) 12950 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12951 v0.AddArg(x) 12952 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12953 v1.AddArg(y) 12954 v0.AddArg(v1) 12955 v.AddArg(v0) 12956 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12957 v2.AuxInt = 0 12958 v.AddArg(v2) 12959 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12960 v3.AuxInt = 64 12961 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12962 v4.AddArg(y) 12963 v3.AddArg(v4) 12964 v.AddArg(v3) 12965 return true 12966 } 12967 } 12968 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 12969 b := v.Block 12970 _ = b 12971 // match: (Lsh32x64 <t> x y) 12972 // cond: 12973 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 12974 for { 12975 t := v.Type 12976 _ = v.Args[1] 12977 x := v.Args[0] 12978 y := v.Args[1] 12979 v.reset(OpARM64CSELULT) 12980 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12981 v0.AddArg(x) 12982 v0.AddArg(y) 12983 v.AddArg(v0) 12984 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12985 v1.AuxInt = 0 12986 v.AddArg(v1) 12987 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12988 v2.AuxInt = 64 12989 v2.AddArg(y) 12990 v.AddArg(v2) 12991 return true 12992 } 12993 } 12994 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 12995 b := v.Block 12996 _ = b 12997 typ := &b.Func.Config.Types 12998 _ = typ 12999 // match: (Lsh32x8 <t> x y) 13000 // cond: 13001 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13002 for { 13003 t := v.Type 
13004 _ = v.Args[1] 13005 x := v.Args[0] 13006 y := v.Args[1] 13007 v.reset(OpARM64CSELULT) 13008 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13009 v0.AddArg(x) 13010 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13011 v1.AddArg(y) 13012 v0.AddArg(v1) 13013 v.AddArg(v0) 13014 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13015 v2.AuxInt = 0 13016 v.AddArg(v2) 13017 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13018 v3.AuxInt = 64 13019 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13020 v4.AddArg(y) 13021 v3.AddArg(v4) 13022 v.AddArg(v3) 13023 return true 13024 } 13025 } 13026 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 13027 b := v.Block 13028 _ = b 13029 typ := &b.Func.Config.Types 13030 _ = typ 13031 // match: (Lsh64x16 <t> x y) 13032 // cond: 13033 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13034 for { 13035 t := v.Type 13036 _ = v.Args[1] 13037 x := v.Args[0] 13038 y := v.Args[1] 13039 v.reset(OpARM64CSELULT) 13040 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13041 v0.AddArg(x) 13042 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13043 v1.AddArg(y) 13044 v0.AddArg(v1) 13045 v.AddArg(v0) 13046 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13047 v2.AuxInt = 0 13048 v.AddArg(v2) 13049 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13050 v3.AuxInt = 64 13051 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13052 v4.AddArg(y) 13053 v3.AddArg(v4) 13054 v.AddArg(v3) 13055 return true 13056 } 13057 } 13058 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 13059 b := v.Block 13060 _ = b 13061 typ := &b.Func.Config.Types 13062 _ = typ 13063 // match: (Lsh64x32 <t> x y) 13064 // cond: 13065 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13066 for { 13067 t := v.Type 13068 _ = v.Args[1] 13069 x := v.Args[0] 13070 y := v.Args[1] 13071 v.reset(OpARM64CSELULT) 13072 v0 := b.NewValue0(v.Pos, OpARM64SLL, 
t) 13073 v0.AddArg(x) 13074 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13075 v1.AddArg(y) 13076 v0.AddArg(v1) 13077 v.AddArg(v0) 13078 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13079 v2.AuxInt = 0 13080 v.AddArg(v2) 13081 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13082 v3.AuxInt = 64 13083 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13084 v4.AddArg(y) 13085 v3.AddArg(v4) 13086 v.AddArg(v3) 13087 return true 13088 } 13089 } 13090 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 13091 b := v.Block 13092 _ = b 13093 // match: (Lsh64x64 <t> x y) 13094 // cond: 13095 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 13096 for { 13097 t := v.Type 13098 _ = v.Args[1] 13099 x := v.Args[0] 13100 y := v.Args[1] 13101 v.reset(OpARM64CSELULT) 13102 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13103 v0.AddArg(x) 13104 v0.AddArg(y) 13105 v.AddArg(v0) 13106 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13107 v1.AuxInt = 0 13108 v.AddArg(v1) 13109 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13110 v2.AuxInt = 64 13111 v2.AddArg(y) 13112 v.AddArg(v2) 13113 return true 13114 } 13115 } 13116 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 13117 b := v.Block 13118 _ = b 13119 typ := &b.Func.Config.Types 13120 _ = typ 13121 // match: (Lsh64x8 <t> x y) 13122 // cond: 13123 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13124 for { 13125 t := v.Type 13126 _ = v.Args[1] 13127 x := v.Args[0] 13128 y := v.Args[1] 13129 v.reset(OpARM64CSELULT) 13130 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13131 v0.AddArg(x) 13132 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13133 v1.AddArg(y) 13134 v0.AddArg(v1) 13135 v.AddArg(v0) 13136 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13137 v2.AuxInt = 0 13138 v.AddArg(v2) 13139 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13140 v3.AuxInt = 64 13141 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 
13142 v4.AddArg(y) 13143 v3.AddArg(v4) 13144 v.AddArg(v3) 13145 return true 13146 } 13147 } 13148 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 13149 b := v.Block 13150 _ = b 13151 typ := &b.Func.Config.Types 13152 _ = typ 13153 // match: (Lsh8x16 <t> x y) 13154 // cond: 13155 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13156 for { 13157 t := v.Type 13158 _ = v.Args[1] 13159 x := v.Args[0] 13160 y := v.Args[1] 13161 v.reset(OpARM64CSELULT) 13162 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13163 v0.AddArg(x) 13164 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13165 v1.AddArg(y) 13166 v0.AddArg(v1) 13167 v.AddArg(v0) 13168 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13169 v2.AuxInt = 0 13170 v.AddArg(v2) 13171 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13172 v3.AuxInt = 64 13173 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13174 v4.AddArg(y) 13175 v3.AddArg(v4) 13176 v.AddArg(v3) 13177 return true 13178 } 13179 } 13180 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 13181 b := v.Block 13182 _ = b 13183 typ := &b.Func.Config.Types 13184 _ = typ 13185 // match: (Lsh8x32 <t> x y) 13186 // cond: 13187 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13188 for { 13189 t := v.Type 13190 _ = v.Args[1] 13191 x := v.Args[0] 13192 y := v.Args[1] 13193 v.reset(OpARM64CSELULT) 13194 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13195 v0.AddArg(x) 13196 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13197 v1.AddArg(y) 13198 v0.AddArg(v1) 13199 v.AddArg(v0) 13200 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13201 v2.AuxInt = 0 13202 v.AddArg(v2) 13203 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13204 v3.AuxInt = 64 13205 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13206 v4.AddArg(y) 13207 v3.AddArg(v4) 13208 v.AddArg(v3) 13209 return true 13210 } 13211 } 13212 func rewriteValueARM64_OpLsh8x64_0(v 
*Value) bool { 13213 b := v.Block 13214 _ = b 13215 // match: (Lsh8x64 <t> x y) 13216 // cond: 13217 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 13218 for { 13219 t := v.Type 13220 _ = v.Args[1] 13221 x := v.Args[0] 13222 y := v.Args[1] 13223 v.reset(OpARM64CSELULT) 13224 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13225 v0.AddArg(x) 13226 v0.AddArg(y) 13227 v.AddArg(v0) 13228 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13229 v1.AuxInt = 0 13230 v.AddArg(v1) 13231 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13232 v2.AuxInt = 64 13233 v2.AddArg(y) 13234 v.AddArg(v2) 13235 return true 13236 } 13237 } 13238 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool { 13239 b := v.Block 13240 _ = b 13241 typ := &b.Func.Config.Types 13242 _ = typ 13243 // match: (Lsh8x8 <t> x y) 13244 // cond: 13245 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13246 for { 13247 t := v.Type 13248 _ = v.Args[1] 13249 x := v.Args[0] 13250 y := v.Args[1] 13251 v.reset(OpARM64CSELULT) 13252 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13253 v0.AddArg(x) 13254 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13255 v1.AddArg(y) 13256 v0.AddArg(v1) 13257 v.AddArg(v0) 13258 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13259 v2.AuxInt = 0 13260 v.AddArg(v2) 13261 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13262 v3.AuxInt = 64 13263 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13264 v4.AddArg(y) 13265 v3.AddArg(v4) 13266 v.AddArg(v3) 13267 return true 13268 } 13269 } 13270 func rewriteValueARM64_OpMod16_0(v *Value) bool { 13271 b := v.Block 13272 _ = b 13273 typ := &b.Func.Config.Types 13274 _ = typ 13275 // match: (Mod16 x y) 13276 // cond: 13277 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 13278 for { 13279 _ = v.Args[1] 13280 x := v.Args[0] 13281 y := v.Args[1] 13282 v.reset(OpARM64MODW) 13283 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13284 v0.AddArg(x) 13285 
v.AddArg(v0) 13286 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13287 v1.AddArg(y) 13288 v.AddArg(v1) 13289 return true 13290 } 13291 } 13292 func rewriteValueARM64_OpMod16u_0(v *Value) bool { 13293 b := v.Block 13294 _ = b 13295 typ := &b.Func.Config.Types 13296 _ = typ 13297 // match: (Mod16u x y) 13298 // cond: 13299 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 13300 for { 13301 _ = v.Args[1] 13302 x := v.Args[0] 13303 y := v.Args[1] 13304 v.reset(OpARM64UMODW) 13305 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13306 v0.AddArg(x) 13307 v.AddArg(v0) 13308 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13309 v1.AddArg(y) 13310 v.AddArg(v1) 13311 return true 13312 } 13313 } 13314 func rewriteValueARM64_OpMod32_0(v *Value) bool { 13315 // match: (Mod32 x y) 13316 // cond: 13317 // result: (MODW x y) 13318 for { 13319 _ = v.Args[1] 13320 x := v.Args[0] 13321 y := v.Args[1] 13322 v.reset(OpARM64MODW) 13323 v.AddArg(x) 13324 v.AddArg(y) 13325 return true 13326 } 13327 } 13328 func rewriteValueARM64_OpMod32u_0(v *Value) bool { 13329 // match: (Mod32u x y) 13330 // cond: 13331 // result: (UMODW x y) 13332 for { 13333 _ = v.Args[1] 13334 x := v.Args[0] 13335 y := v.Args[1] 13336 v.reset(OpARM64UMODW) 13337 v.AddArg(x) 13338 v.AddArg(y) 13339 return true 13340 } 13341 } 13342 func rewriteValueARM64_OpMod64_0(v *Value) bool { 13343 // match: (Mod64 x y) 13344 // cond: 13345 // result: (MOD x y) 13346 for { 13347 _ = v.Args[1] 13348 x := v.Args[0] 13349 y := v.Args[1] 13350 v.reset(OpARM64MOD) 13351 v.AddArg(x) 13352 v.AddArg(y) 13353 return true 13354 } 13355 } 13356 func rewriteValueARM64_OpMod64u_0(v *Value) bool { 13357 // match: (Mod64u x y) 13358 // cond: 13359 // result: (UMOD x y) 13360 for { 13361 _ = v.Args[1] 13362 x := v.Args[0] 13363 y := v.Args[1] 13364 v.reset(OpARM64UMOD) 13365 v.AddArg(x) 13366 v.AddArg(y) 13367 return true 13368 } 13369 } 13370 func rewriteValueARM64_OpMod8_0(v *Value) bool { 13371 b := v.Block 13372 _ = b 
13373 typ := &b.Func.Config.Types 13374 _ = typ 13375 // match: (Mod8 x y) 13376 // cond: 13377 // result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 13378 for { 13379 _ = v.Args[1] 13380 x := v.Args[0] 13381 y := v.Args[1] 13382 v.reset(OpARM64MODW) 13383 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13384 v0.AddArg(x) 13385 v.AddArg(v0) 13386 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13387 v1.AddArg(y) 13388 v.AddArg(v1) 13389 return true 13390 } 13391 } 13392 func rewriteValueARM64_OpMod8u_0(v *Value) bool { 13393 b := v.Block 13394 _ = b 13395 typ := &b.Func.Config.Types 13396 _ = typ 13397 // match: (Mod8u x y) 13398 // cond: 13399 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 13400 for { 13401 _ = v.Args[1] 13402 x := v.Args[0] 13403 y := v.Args[1] 13404 v.reset(OpARM64UMODW) 13405 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13406 v0.AddArg(x) 13407 v.AddArg(v0) 13408 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13409 v1.AddArg(y) 13410 v.AddArg(v1) 13411 return true 13412 } 13413 } 13414 func rewriteValueARM64_OpMove_0(v *Value) bool { 13415 b := v.Block 13416 _ = b 13417 typ := &b.Func.Config.Types 13418 _ = typ 13419 // match: (Move [0] _ _ mem) 13420 // cond: 13421 // result: mem 13422 for { 13423 if v.AuxInt != 0 { 13424 break 13425 } 13426 _ = v.Args[2] 13427 mem := v.Args[2] 13428 v.reset(OpCopy) 13429 v.Type = mem.Type 13430 v.AddArg(mem) 13431 return true 13432 } 13433 // match: (Move [1] dst src mem) 13434 // cond: 13435 // result: (MOVBstore dst (MOVBUload src mem) mem) 13436 for { 13437 if v.AuxInt != 1 { 13438 break 13439 } 13440 _ = v.Args[2] 13441 dst := v.Args[0] 13442 src := v.Args[1] 13443 mem := v.Args[2] 13444 v.reset(OpARM64MOVBstore) 13445 v.AddArg(dst) 13446 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13447 v0.AddArg(src) 13448 v0.AddArg(mem) 13449 v.AddArg(v0) 13450 v.AddArg(mem) 13451 return true 13452 } 13453 // match: (Move [2] dst src mem) 13454 // cond: 13455 // result: (MOVHstore 
dst (MOVHUload src mem) mem) 13456 for { 13457 if v.AuxInt != 2 { 13458 break 13459 } 13460 _ = v.Args[2] 13461 dst := v.Args[0] 13462 src := v.Args[1] 13463 mem := v.Args[2] 13464 v.reset(OpARM64MOVHstore) 13465 v.AddArg(dst) 13466 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13467 v0.AddArg(src) 13468 v0.AddArg(mem) 13469 v.AddArg(v0) 13470 v.AddArg(mem) 13471 return true 13472 } 13473 // match: (Move [4] dst src mem) 13474 // cond: 13475 // result: (MOVWstore dst (MOVWUload src mem) mem) 13476 for { 13477 if v.AuxInt != 4 { 13478 break 13479 } 13480 _ = v.Args[2] 13481 dst := v.Args[0] 13482 src := v.Args[1] 13483 mem := v.Args[2] 13484 v.reset(OpARM64MOVWstore) 13485 v.AddArg(dst) 13486 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13487 v0.AddArg(src) 13488 v0.AddArg(mem) 13489 v.AddArg(v0) 13490 v.AddArg(mem) 13491 return true 13492 } 13493 // match: (Move [8] dst src mem) 13494 // cond: 13495 // result: (MOVDstore dst (MOVDload src mem) mem) 13496 for { 13497 if v.AuxInt != 8 { 13498 break 13499 } 13500 _ = v.Args[2] 13501 dst := v.Args[0] 13502 src := v.Args[1] 13503 mem := v.Args[2] 13504 v.reset(OpARM64MOVDstore) 13505 v.AddArg(dst) 13506 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13507 v0.AddArg(src) 13508 v0.AddArg(mem) 13509 v.AddArg(v0) 13510 v.AddArg(mem) 13511 return true 13512 } 13513 // match: (Move [3] dst src mem) 13514 // cond: 13515 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 13516 for { 13517 if v.AuxInt != 3 { 13518 break 13519 } 13520 _ = v.Args[2] 13521 dst := v.Args[0] 13522 src := v.Args[1] 13523 mem := v.Args[2] 13524 v.reset(OpARM64MOVBstore) 13525 v.AuxInt = 2 13526 v.AddArg(dst) 13527 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13528 v0.AuxInt = 2 13529 v0.AddArg(src) 13530 v0.AddArg(mem) 13531 v.AddArg(v0) 13532 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 13533 v1.AddArg(dst) 13534 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, 
typ.UInt16) 13535 v2.AddArg(src) 13536 v2.AddArg(mem) 13537 v1.AddArg(v2) 13538 v1.AddArg(mem) 13539 v.AddArg(v1) 13540 return true 13541 } 13542 // match: (Move [5] dst src mem) 13543 // cond: 13544 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 13545 for { 13546 if v.AuxInt != 5 { 13547 break 13548 } 13549 _ = v.Args[2] 13550 dst := v.Args[0] 13551 src := v.Args[1] 13552 mem := v.Args[2] 13553 v.reset(OpARM64MOVBstore) 13554 v.AuxInt = 4 13555 v.AddArg(dst) 13556 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13557 v0.AuxInt = 4 13558 v0.AddArg(src) 13559 v0.AddArg(mem) 13560 v.AddArg(v0) 13561 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13562 v1.AddArg(dst) 13563 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13564 v2.AddArg(src) 13565 v2.AddArg(mem) 13566 v1.AddArg(v2) 13567 v1.AddArg(mem) 13568 v.AddArg(v1) 13569 return true 13570 } 13571 // match: (Move [6] dst src mem) 13572 // cond: 13573 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 13574 for { 13575 if v.AuxInt != 6 { 13576 break 13577 } 13578 _ = v.Args[2] 13579 dst := v.Args[0] 13580 src := v.Args[1] 13581 mem := v.Args[2] 13582 v.reset(OpARM64MOVHstore) 13583 v.AuxInt = 4 13584 v.AddArg(dst) 13585 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13586 v0.AuxInt = 4 13587 v0.AddArg(src) 13588 v0.AddArg(mem) 13589 v.AddArg(v0) 13590 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13591 v1.AddArg(dst) 13592 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13593 v2.AddArg(src) 13594 v2.AddArg(mem) 13595 v1.AddArg(v2) 13596 v1.AddArg(mem) 13597 v.AddArg(v1) 13598 return true 13599 } 13600 // match: (Move [7] dst src mem) 13601 // cond: 13602 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 13603 for { 13604 if v.AuxInt != 7 { 13605 break 13606 } 13607 _ = v.Args[2] 13608 
dst := v.Args[0] 13609 src := v.Args[1] 13610 mem := v.Args[2] 13611 v.reset(OpARM64MOVBstore) 13612 v.AuxInt = 6 13613 v.AddArg(dst) 13614 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13615 v0.AuxInt = 6 13616 v0.AddArg(src) 13617 v0.AddArg(mem) 13618 v.AddArg(v0) 13619 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 13620 v1.AuxInt = 4 13621 v1.AddArg(dst) 13622 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13623 v2.AuxInt = 4 13624 v2.AddArg(src) 13625 v2.AddArg(mem) 13626 v1.AddArg(v2) 13627 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13628 v3.AddArg(dst) 13629 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13630 v4.AddArg(src) 13631 v4.AddArg(mem) 13632 v3.AddArg(v4) 13633 v3.AddArg(mem) 13634 v1.AddArg(v3) 13635 v.AddArg(v1) 13636 return true 13637 } 13638 // match: (Move [12] dst src mem) 13639 // cond: 13640 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 13641 for { 13642 if v.AuxInt != 12 { 13643 break 13644 } 13645 _ = v.Args[2] 13646 dst := v.Args[0] 13647 src := v.Args[1] 13648 mem := v.Args[2] 13649 v.reset(OpARM64MOVWstore) 13650 v.AuxInt = 8 13651 v.AddArg(dst) 13652 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13653 v0.AuxInt = 8 13654 v0.AddArg(src) 13655 v0.AddArg(mem) 13656 v.AddArg(v0) 13657 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 13658 v1.AddArg(dst) 13659 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13660 v2.AddArg(src) 13661 v2.AddArg(mem) 13662 v1.AddArg(v2) 13663 v1.AddArg(mem) 13664 v.AddArg(v1) 13665 return true 13666 } 13667 return false 13668 } 13669 func rewriteValueARM64_OpMove_10(v *Value) bool { 13670 b := v.Block 13671 _ = b 13672 config := b.Func.Config 13673 _ = config 13674 typ := &b.Func.Config.Types 13675 _ = typ 13676 // match: (Move [16] dst src mem) 13677 // cond: 13678 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 13679 for { 13680 if v.AuxInt 
!= 16 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] dst src mem)
	// cond:
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 != 0 && s > 8
	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(dst)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = s - s%8
		v1.AddArg(src)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = s - s%8
		v2.AddArg(dst)
		v2.AddArg(src)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice
	// result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		v.AuxInt = 8 * (128 - int64(s/8))
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 24 && s%8 == 0
	// result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 && s%8 == 0) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = s - 8
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpMul16_0(v *Value) bool {
	// match: (Mul16 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func
rewriteValueARM64_OpMul32_0(v *Value) bool {
	// match: (Mul32 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul32F_0(v *Value) bool {
	// match: (Mul32F x y)
	// cond:
	// result: (FMULS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul64_0(v *Value) bool {
	// match: (Mul64 x y)
	// cond:
	// result: (MUL x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul64F_0(v *Value) bool {
	// match: (Mul64F x y)
	// cond:
	// result: (FMULD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpMul8_0(v *Value) bool {
	// match: (Mul8 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpNeg16_0(v *Value) bool {
	// match: (Neg16 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg32_0(v *Value) bool {
	// match: (Neg32 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
	// match: (Neg32F x)
	// cond:
	// result: (FNEGS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg64_0(v *Value) bool {
	// match: (Neg64 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
	// match: (Neg64F x)
	// cond:
	// result: (FNEGD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeg8_0(v *Value) bool {
	// match: (Neg8 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpNeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq16 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (NotEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func
rewriteValueARM64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (NotEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (NotEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq8 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpNot_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Not x)
	// cond:
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v.Args[0]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVDaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
func rewriteValueARM64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpOrB_0(v *Value) bool {
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpRound32F_0(v *Value) bool {
	// match: (Round32F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpRound64F_0(v *Value) bool {
	// match: (Round64F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func
rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func
rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x16 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x32 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x64 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x8 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x16 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x32 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v1.AuxInt = 63
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x8 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := 
v.Args[0] 15141 y := v.Args[1] 15142 v.reset(OpARM64CSELULT) 15143 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 15144 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15145 v1.AddArg(x) 15146 v0.AddArg(v1) 15147 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15148 v2.AddArg(y) 15149 v0.AddArg(v2) 15150 v.AddArg(v0) 15151 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 15152 v3.AuxInt = 0 15153 v.AddArg(v3) 15154 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15155 v4.AuxInt = 64 15156 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15157 v5.AddArg(y) 15158 v4.AddArg(v5) 15159 v.AddArg(v4) 15160 return true 15161 } 15162 } 15163 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 15164 b := v.Block 15165 _ = b 15166 typ := &b.Func.Config.Types 15167 _ = typ 15168 // match: (Rsh8x16 x y) 15169 // cond: 15170 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 15171 for { 15172 _ = v.Args[1] 15173 x := v.Args[0] 15174 y := v.Args[1] 15175 v.reset(OpARM64SRA) 15176 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15177 v0.AddArg(x) 15178 v.AddArg(v0) 15179 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15180 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15181 v2.AddArg(y) 15182 v1.AddArg(v2) 15183 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15184 v3.AuxInt = 63 15185 v1.AddArg(v3) 15186 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15187 v4.AuxInt = 64 15188 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15189 v5.AddArg(y) 15190 v4.AddArg(v5) 15191 v1.AddArg(v4) 15192 v.AddArg(v1) 15193 return true 15194 } 15195 } 15196 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 15197 b := v.Block 15198 _ = b 15199 typ := &b.Func.Config.Types 15200 _ = typ 15201 // match: (Rsh8x32 x y) 15202 // cond: 15203 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 15204 
for { 15205 _ = v.Args[1] 15206 x := v.Args[0] 15207 y := v.Args[1] 15208 v.reset(OpARM64SRA) 15209 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15210 v0.AddArg(x) 15211 v.AddArg(v0) 15212 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15213 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15214 v2.AddArg(y) 15215 v1.AddArg(v2) 15216 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15217 v3.AuxInt = 63 15218 v1.AddArg(v3) 15219 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15220 v4.AuxInt = 64 15221 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15222 v5.AddArg(y) 15223 v4.AddArg(v5) 15224 v1.AddArg(v4) 15225 v.AddArg(v1) 15226 return true 15227 } 15228 } 15229 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 15230 b := v.Block 15231 _ = b 15232 typ := &b.Func.Config.Types 15233 _ = typ 15234 // match: (Rsh8x64 x y) 15235 // cond: 15236 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y))) 15237 for { 15238 _ = v.Args[1] 15239 x := v.Args[0] 15240 y := v.Args[1] 15241 v.reset(OpARM64SRA) 15242 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15243 v0.AddArg(x) 15244 v.AddArg(v0) 15245 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15246 v1.AddArg(y) 15247 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15248 v2.AuxInt = 63 15249 v1.AddArg(v2) 15250 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15251 v3.AuxInt = 64 15252 v3.AddArg(y) 15253 v1.AddArg(v3) 15254 v.AddArg(v1) 15255 return true 15256 } 15257 } 15258 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 15259 b := v.Block 15260 _ = b 15261 typ := &b.Func.Config.Types 15262 _ = typ 15263 // match: (Rsh8x8 x y) 15264 // cond: 15265 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 15266 for { 15267 _ = v.Args[1] 15268 x := v.Args[0] 15269 y := v.Args[1] 15270 v.reset(OpARM64SRA) 15271 v0 := b.NewValue0(v.Pos, OpSignExt8to64, 
typ.Int64) 15272 v0.AddArg(x) 15273 v.AddArg(v0) 15274 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15275 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15276 v2.AddArg(y) 15277 v1.AddArg(v2) 15278 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15279 v3.AuxInt = 63 15280 v1.AddArg(v3) 15281 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15282 v4.AuxInt = 64 15283 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15284 v5.AddArg(y) 15285 v4.AddArg(v5) 15286 v1.AddArg(v4) 15287 v.AddArg(v1) 15288 return true 15289 } 15290 } 15291 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool { 15292 // match: (SignExt16to32 x) 15293 // cond: 15294 // result: (MOVHreg x) 15295 for { 15296 x := v.Args[0] 15297 v.reset(OpARM64MOVHreg) 15298 v.AddArg(x) 15299 return true 15300 } 15301 } 15302 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool { 15303 // match: (SignExt16to64 x) 15304 // cond: 15305 // result: (MOVHreg x) 15306 for { 15307 x := v.Args[0] 15308 v.reset(OpARM64MOVHreg) 15309 v.AddArg(x) 15310 return true 15311 } 15312 } 15313 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool { 15314 // match: (SignExt32to64 x) 15315 // cond: 15316 // result: (MOVWreg x) 15317 for { 15318 x := v.Args[0] 15319 v.reset(OpARM64MOVWreg) 15320 v.AddArg(x) 15321 return true 15322 } 15323 } 15324 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool { 15325 // match: (SignExt8to16 x) 15326 // cond: 15327 // result: (MOVBreg x) 15328 for { 15329 x := v.Args[0] 15330 v.reset(OpARM64MOVBreg) 15331 v.AddArg(x) 15332 return true 15333 } 15334 } 15335 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool { 15336 // match: (SignExt8to32 x) 15337 // cond: 15338 // result: (MOVBreg x) 15339 for { 15340 x := v.Args[0] 15341 v.reset(OpARM64MOVBreg) 15342 v.AddArg(x) 15343 return true 15344 } 15345 } 15346 func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool { 15347 // match: (SignExt8to64 x) 15348 // cond: 15349 // result: (MOVBreg x) 15350 for { 15351 x 
:= v.Args[0] 15352 v.reset(OpARM64MOVBreg) 15353 v.AddArg(x) 15354 return true 15355 } 15356 } 15357 func rewriteValueARM64_OpSlicemask_0(v *Value) bool { 15358 b := v.Block 15359 _ = b 15360 // match: (Slicemask <t> x) 15361 // cond: 15362 // result: (SRAconst (NEG <t> x) [63]) 15363 for { 15364 t := v.Type 15365 x := v.Args[0] 15366 v.reset(OpARM64SRAconst) 15367 v.AuxInt = 63 15368 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 15369 v0.AddArg(x) 15370 v.AddArg(v0) 15371 return true 15372 } 15373 } 15374 func rewriteValueARM64_OpSqrt_0(v *Value) bool { 15375 // match: (Sqrt x) 15376 // cond: 15377 // result: (FSQRTD x) 15378 for { 15379 x := v.Args[0] 15380 v.reset(OpARM64FSQRTD) 15381 v.AddArg(x) 15382 return true 15383 } 15384 } 15385 func rewriteValueARM64_OpStaticCall_0(v *Value) bool { 15386 // match: (StaticCall [argwid] {target} mem) 15387 // cond: 15388 // result: (CALLstatic [argwid] {target} mem) 15389 for { 15390 argwid := v.AuxInt 15391 target := v.Aux 15392 mem := v.Args[0] 15393 v.reset(OpARM64CALLstatic) 15394 v.AuxInt = argwid 15395 v.Aux = target 15396 v.AddArg(mem) 15397 return true 15398 } 15399 } 15400 func rewriteValueARM64_OpStore_0(v *Value) bool { 15401 // match: (Store {t} ptr val mem) 15402 // cond: t.(*types.Type).Size() == 1 15403 // result: (MOVBstore ptr val mem) 15404 for { 15405 t := v.Aux 15406 _ = v.Args[2] 15407 ptr := v.Args[0] 15408 val := v.Args[1] 15409 mem := v.Args[2] 15410 if !(t.(*types.Type).Size() == 1) { 15411 break 15412 } 15413 v.reset(OpARM64MOVBstore) 15414 v.AddArg(ptr) 15415 v.AddArg(val) 15416 v.AddArg(mem) 15417 return true 15418 } 15419 // match: (Store {t} ptr val mem) 15420 // cond: t.(*types.Type).Size() == 2 15421 // result: (MOVHstore ptr val mem) 15422 for { 15423 t := v.Aux 15424 _ = v.Args[2] 15425 ptr := v.Args[0] 15426 val := v.Args[1] 15427 mem := v.Args[2] 15428 if !(t.(*types.Type).Size() == 2) { 15429 break 15430 } 15431 v.reset(OpARM64MOVHstore) 15432 v.AddArg(ptr) 15433 v.AddArg(val) 15434 
v.AddArg(mem) 15435 return true 15436 } 15437 // match: (Store {t} ptr val mem) 15438 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 15439 // result: (MOVWstore ptr val mem) 15440 for { 15441 t := v.Aux 15442 _ = v.Args[2] 15443 ptr := v.Args[0] 15444 val := v.Args[1] 15445 mem := v.Args[2] 15446 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 15447 break 15448 } 15449 v.reset(OpARM64MOVWstore) 15450 v.AddArg(ptr) 15451 v.AddArg(val) 15452 v.AddArg(mem) 15453 return true 15454 } 15455 // match: (Store {t} ptr val mem) 15456 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 15457 // result: (MOVDstore ptr val mem) 15458 for { 15459 t := v.Aux 15460 _ = v.Args[2] 15461 ptr := v.Args[0] 15462 val := v.Args[1] 15463 mem := v.Args[2] 15464 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 15465 break 15466 } 15467 v.reset(OpARM64MOVDstore) 15468 v.AddArg(ptr) 15469 v.AddArg(val) 15470 v.AddArg(mem) 15471 return true 15472 } 15473 // match: (Store {t} ptr val mem) 15474 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 15475 // result: (FMOVSstore ptr val mem) 15476 for { 15477 t := v.Aux 15478 _ = v.Args[2] 15479 ptr := v.Args[0] 15480 val := v.Args[1] 15481 mem := v.Args[2] 15482 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 15483 break 15484 } 15485 v.reset(OpARM64FMOVSstore) 15486 v.AddArg(ptr) 15487 v.AddArg(val) 15488 v.AddArg(mem) 15489 return true 15490 } 15491 // match: (Store {t} ptr val mem) 15492 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 15493 // result: (FMOVDstore ptr val mem) 15494 for { 15495 t := v.Aux 15496 _ = v.Args[2] 15497 ptr := v.Args[0] 15498 val := v.Args[1] 15499 mem := v.Args[2] 15500 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 15501 break 15502 } 15503 v.reset(OpARM64FMOVDstore) 15504 v.AddArg(ptr) 15505 v.AddArg(val) 15506 v.AddArg(mem) 15507 return true 15508 } 15509 return false 15510 } 15511 func 
rewriteValueARM64_OpSub16_0(v *Value) bool { 15512 // match: (Sub16 x y) 15513 // cond: 15514 // result: (SUB x y) 15515 for { 15516 _ = v.Args[1] 15517 x := v.Args[0] 15518 y := v.Args[1] 15519 v.reset(OpARM64SUB) 15520 v.AddArg(x) 15521 v.AddArg(y) 15522 return true 15523 } 15524 } 15525 func rewriteValueARM64_OpSub32_0(v *Value) bool { 15526 // match: (Sub32 x y) 15527 // cond: 15528 // result: (SUB x y) 15529 for { 15530 _ = v.Args[1] 15531 x := v.Args[0] 15532 y := v.Args[1] 15533 v.reset(OpARM64SUB) 15534 v.AddArg(x) 15535 v.AddArg(y) 15536 return true 15537 } 15538 } 15539 func rewriteValueARM64_OpSub32F_0(v *Value) bool { 15540 // match: (Sub32F x y) 15541 // cond: 15542 // result: (FSUBS x y) 15543 for { 15544 _ = v.Args[1] 15545 x := v.Args[0] 15546 y := v.Args[1] 15547 v.reset(OpARM64FSUBS) 15548 v.AddArg(x) 15549 v.AddArg(y) 15550 return true 15551 } 15552 } 15553 func rewriteValueARM64_OpSub64_0(v *Value) bool { 15554 // match: (Sub64 x y) 15555 // cond: 15556 // result: (SUB x y) 15557 for { 15558 _ = v.Args[1] 15559 x := v.Args[0] 15560 y := v.Args[1] 15561 v.reset(OpARM64SUB) 15562 v.AddArg(x) 15563 v.AddArg(y) 15564 return true 15565 } 15566 } 15567 func rewriteValueARM64_OpSub64F_0(v *Value) bool { 15568 // match: (Sub64F x y) 15569 // cond: 15570 // result: (FSUBD x y) 15571 for { 15572 _ = v.Args[1] 15573 x := v.Args[0] 15574 y := v.Args[1] 15575 v.reset(OpARM64FSUBD) 15576 v.AddArg(x) 15577 v.AddArg(y) 15578 return true 15579 } 15580 } 15581 func rewriteValueARM64_OpSub8_0(v *Value) bool { 15582 // match: (Sub8 x y) 15583 // cond: 15584 // result: (SUB x y) 15585 for { 15586 _ = v.Args[1] 15587 x := v.Args[0] 15588 y := v.Args[1] 15589 v.reset(OpARM64SUB) 15590 v.AddArg(x) 15591 v.AddArg(y) 15592 return true 15593 } 15594 } 15595 func rewriteValueARM64_OpSubPtr_0(v *Value) bool { 15596 // match: (SubPtr x y) 15597 // cond: 15598 // result: (SUB x y) 15599 for { 15600 _ = v.Args[1] 15601 x := v.Args[0] 15602 y := v.Args[1] 15603 
v.reset(OpARM64SUB) 15604 v.AddArg(x) 15605 v.AddArg(y) 15606 return true 15607 } 15608 } 15609 func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool { 15610 // match: (Trunc16to8 x) 15611 // cond: 15612 // result: x 15613 for { 15614 x := v.Args[0] 15615 v.reset(OpCopy) 15616 v.Type = x.Type 15617 v.AddArg(x) 15618 return true 15619 } 15620 } 15621 func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool { 15622 // match: (Trunc32to16 x) 15623 // cond: 15624 // result: x 15625 for { 15626 x := v.Args[0] 15627 v.reset(OpCopy) 15628 v.Type = x.Type 15629 v.AddArg(x) 15630 return true 15631 } 15632 } 15633 func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool { 15634 // match: (Trunc32to8 x) 15635 // cond: 15636 // result: x 15637 for { 15638 x := v.Args[0] 15639 v.reset(OpCopy) 15640 v.Type = x.Type 15641 v.AddArg(x) 15642 return true 15643 } 15644 } 15645 func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool { 15646 // match: (Trunc64to16 x) 15647 // cond: 15648 // result: x 15649 for { 15650 x := v.Args[0] 15651 v.reset(OpCopy) 15652 v.Type = x.Type 15653 v.AddArg(x) 15654 return true 15655 } 15656 } 15657 func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool { 15658 // match: (Trunc64to32 x) 15659 // cond: 15660 // result: x 15661 for { 15662 x := v.Args[0] 15663 v.reset(OpCopy) 15664 v.Type = x.Type 15665 v.AddArg(x) 15666 return true 15667 } 15668 } 15669 func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool { 15670 // match: (Trunc64to8 x) 15671 // cond: 15672 // result: x 15673 for { 15674 x := v.Args[0] 15675 v.reset(OpCopy) 15676 v.Type = x.Type 15677 v.AddArg(x) 15678 return true 15679 } 15680 } 15681 func rewriteValueARM64_OpXor16_0(v *Value) bool { 15682 // match: (Xor16 x y) 15683 // cond: 15684 // result: (XOR x y) 15685 for { 15686 _ = v.Args[1] 15687 x := v.Args[0] 15688 y := v.Args[1] 15689 v.reset(OpARM64XOR) 15690 v.AddArg(x) 15691 v.AddArg(y) 15692 return true 15693 } 15694 } 15695 func rewriteValueARM64_OpXor32_0(v *Value) bool { 15696 // match: 
(Xor32 x y) 15697 // cond: 15698 // result: (XOR x y) 15699 for { 15700 _ = v.Args[1] 15701 x := v.Args[0] 15702 y := v.Args[1] 15703 v.reset(OpARM64XOR) 15704 v.AddArg(x) 15705 v.AddArg(y) 15706 return true 15707 } 15708 } 15709 func rewriteValueARM64_OpXor64_0(v *Value) bool { 15710 // match: (Xor64 x y) 15711 // cond: 15712 // result: (XOR x y) 15713 for { 15714 _ = v.Args[1] 15715 x := v.Args[0] 15716 y := v.Args[1] 15717 v.reset(OpARM64XOR) 15718 v.AddArg(x) 15719 v.AddArg(y) 15720 return true 15721 } 15722 } 15723 func rewriteValueARM64_OpXor8_0(v *Value) bool { 15724 // match: (Xor8 x y) 15725 // cond: 15726 // result: (XOR x y) 15727 for { 15728 _ = v.Args[1] 15729 x := v.Args[0] 15730 y := v.Args[1] 15731 v.reset(OpARM64XOR) 15732 v.AddArg(x) 15733 v.AddArg(y) 15734 return true 15735 } 15736 } 15737 func rewriteValueARM64_OpZero_0(v *Value) bool { 15738 b := v.Block 15739 _ = b 15740 typ := &b.Func.Config.Types 15741 _ = typ 15742 // match: (Zero [0] _ mem) 15743 // cond: 15744 // result: mem 15745 for { 15746 if v.AuxInt != 0 { 15747 break 15748 } 15749 _ = v.Args[1] 15750 mem := v.Args[1] 15751 v.reset(OpCopy) 15752 v.Type = mem.Type 15753 v.AddArg(mem) 15754 return true 15755 } 15756 // match: (Zero [1] ptr mem) 15757 // cond: 15758 // result: (MOVBstore ptr (MOVDconst [0]) mem) 15759 for { 15760 if v.AuxInt != 1 { 15761 break 15762 } 15763 _ = v.Args[1] 15764 ptr := v.Args[0] 15765 mem := v.Args[1] 15766 v.reset(OpARM64MOVBstore) 15767 v.AddArg(ptr) 15768 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15769 v0.AuxInt = 0 15770 v.AddArg(v0) 15771 v.AddArg(mem) 15772 return true 15773 } 15774 // match: (Zero [2] ptr mem) 15775 // cond: 15776 // result: (MOVHstore ptr (MOVDconst [0]) mem) 15777 for { 15778 if v.AuxInt != 2 { 15779 break 15780 } 15781 _ = v.Args[1] 15782 ptr := v.Args[0] 15783 mem := v.Args[1] 15784 v.reset(OpARM64MOVHstore) 15785 v.AddArg(ptr) 15786 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15787 v0.AuxInt = 0 15788 
v.AddArg(v0) 15789 v.AddArg(mem) 15790 return true 15791 } 15792 // match: (Zero [4] ptr mem) 15793 // cond: 15794 // result: (MOVWstore ptr (MOVDconst [0]) mem) 15795 for { 15796 if v.AuxInt != 4 { 15797 break 15798 } 15799 _ = v.Args[1] 15800 ptr := v.Args[0] 15801 mem := v.Args[1] 15802 v.reset(OpARM64MOVWstore) 15803 v.AddArg(ptr) 15804 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15805 v0.AuxInt = 0 15806 v.AddArg(v0) 15807 v.AddArg(mem) 15808 return true 15809 } 15810 // match: (Zero [8] ptr mem) 15811 // cond: 15812 // result: (MOVDstore ptr (MOVDconst [0]) mem) 15813 for { 15814 if v.AuxInt != 8 { 15815 break 15816 } 15817 _ = v.Args[1] 15818 ptr := v.Args[0] 15819 mem := v.Args[1] 15820 v.reset(OpARM64MOVDstore) 15821 v.AddArg(ptr) 15822 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15823 v0.AuxInt = 0 15824 v.AddArg(v0) 15825 v.AddArg(mem) 15826 return true 15827 } 15828 // match: (Zero [3] ptr mem) 15829 // cond: 15830 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 15831 for { 15832 if v.AuxInt != 3 { 15833 break 15834 } 15835 _ = v.Args[1] 15836 ptr := v.Args[0] 15837 mem := v.Args[1] 15838 v.reset(OpARM64MOVBstore) 15839 v.AuxInt = 2 15840 v.AddArg(ptr) 15841 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15842 v0.AuxInt = 0 15843 v.AddArg(v0) 15844 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 15845 v1.AddArg(ptr) 15846 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15847 v2.AuxInt = 0 15848 v1.AddArg(v2) 15849 v1.AddArg(mem) 15850 v.AddArg(v1) 15851 return true 15852 } 15853 // match: (Zero [5] ptr mem) 15854 // cond: 15855 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15856 for { 15857 if v.AuxInt != 5 { 15858 break 15859 } 15860 _ = v.Args[1] 15861 ptr := v.Args[0] 15862 mem := v.Args[1] 15863 v.reset(OpARM64MOVBstore) 15864 v.AuxInt = 4 15865 v.AddArg(ptr) 15866 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15867 
v0.AuxInt = 0 15868 v.AddArg(v0) 15869 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15870 v1.AddArg(ptr) 15871 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15872 v2.AuxInt = 0 15873 v1.AddArg(v2) 15874 v1.AddArg(mem) 15875 v.AddArg(v1) 15876 return true 15877 } 15878 // match: (Zero [6] ptr mem) 15879 // cond: 15880 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15881 for { 15882 if v.AuxInt != 6 { 15883 break 15884 } 15885 _ = v.Args[1] 15886 ptr := v.Args[0] 15887 mem := v.Args[1] 15888 v.reset(OpARM64MOVHstore) 15889 v.AuxInt = 4 15890 v.AddArg(ptr) 15891 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15892 v0.AuxInt = 0 15893 v.AddArg(v0) 15894 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15895 v1.AddArg(ptr) 15896 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15897 v2.AuxInt = 0 15898 v1.AddArg(v2) 15899 v1.AddArg(mem) 15900 v.AddArg(v1) 15901 return true 15902 } 15903 // match: (Zero [7] ptr mem) 15904 // cond: 15905 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 15906 for { 15907 if v.AuxInt != 7 { 15908 break 15909 } 15910 _ = v.Args[1] 15911 ptr := v.Args[0] 15912 mem := v.Args[1] 15913 v.reset(OpARM64MOVBstore) 15914 v.AuxInt = 6 15915 v.AddArg(ptr) 15916 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15917 v0.AuxInt = 0 15918 v.AddArg(v0) 15919 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 15920 v1.AuxInt = 4 15921 v1.AddArg(ptr) 15922 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15923 v2.AuxInt = 0 15924 v1.AddArg(v2) 15925 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15926 v3.AddArg(ptr) 15927 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15928 v4.AuxInt = 0 15929 v3.AddArg(v4) 15930 v3.AddArg(mem) 15931 v1.AddArg(v3) 15932 v.AddArg(v1) 15933 return true 15934 } 15935 // match: (Zero [9] ptr mem) 15936 // cond: 15937 // result: (MOVBstore [8] 
ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15938 for { 15939 if v.AuxInt != 9 { 15940 break 15941 } 15942 _ = v.Args[1] 15943 ptr := v.Args[0] 15944 mem := v.Args[1] 15945 v.reset(OpARM64MOVBstore) 15946 v.AuxInt = 8 15947 v.AddArg(ptr) 15948 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15949 v0.AuxInt = 0 15950 v.AddArg(v0) 15951 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 15952 v1.AddArg(ptr) 15953 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15954 v2.AuxInt = 0 15955 v1.AddArg(v2) 15956 v1.AddArg(mem) 15957 v.AddArg(v1) 15958 return true 15959 } 15960 return false 15961 } 15962 func rewriteValueARM64_OpZero_10(v *Value) bool { 15963 b := v.Block 15964 _ = b 15965 typ := &b.Func.Config.Types 15966 _ = typ 15967 // match: (Zero [10] ptr mem) 15968 // cond: 15969 // result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15970 for { 15971 if v.AuxInt != 10 { 15972 break 15973 } 15974 _ = v.Args[1] 15975 ptr := v.Args[0] 15976 mem := v.Args[1] 15977 v.reset(OpARM64MOVHstore) 15978 v.AuxInt = 8 15979 v.AddArg(ptr) 15980 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15981 v0.AuxInt = 0 15982 v.AddArg(v0) 15983 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 15984 v1.AddArg(ptr) 15985 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15986 v2.AuxInt = 0 15987 v1.AddArg(v2) 15988 v1.AddArg(mem) 15989 v.AddArg(v1) 15990 return true 15991 } 15992 // match: (Zero [11] ptr mem) 15993 // cond: 15994 // result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 15995 for { 15996 if v.AuxInt != 11 { 15997 break 15998 } 15999 _ = v.Args[1] 16000 ptr := v.Args[0] 16001 mem := v.Args[1] 16002 v.reset(OpARM64MOVBstore) 16003 v.AuxInt = 10 16004 v.AddArg(ptr) 16005 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16006 v0.AuxInt = 0 16007 v.AddArg(v0) 16008 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 16009 
v1.AuxInt = 8 16010 v1.AddArg(ptr) 16011 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16012 v2.AuxInt = 0 16013 v1.AddArg(v2) 16014 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16015 v3.AddArg(ptr) 16016 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16017 v4.AuxInt = 0 16018 v3.AddArg(v4) 16019 v3.AddArg(mem) 16020 v1.AddArg(v3) 16021 v.AddArg(v1) 16022 return true 16023 } 16024 // match: (Zero [12] ptr mem) 16025 // cond: 16026 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 16027 for { 16028 if v.AuxInt != 12 { 16029 break 16030 } 16031 _ = v.Args[1] 16032 ptr := v.Args[0] 16033 mem := v.Args[1] 16034 v.reset(OpARM64MOVWstore) 16035 v.AuxInt = 8 16036 v.AddArg(ptr) 16037 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16038 v0.AuxInt = 0 16039 v.AddArg(v0) 16040 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16041 v1.AddArg(ptr) 16042 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16043 v2.AuxInt = 0 16044 v1.AddArg(v2) 16045 v1.AddArg(mem) 16046 v.AddArg(v1) 16047 return true 16048 } 16049 // match: (Zero [13] ptr mem) 16050 // cond: 16051 // result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 16052 for { 16053 if v.AuxInt != 13 { 16054 break 16055 } 16056 _ = v.Args[1] 16057 ptr := v.Args[0] 16058 mem := v.Args[1] 16059 v.reset(OpARM64MOVBstore) 16060 v.AuxInt = 12 16061 v.AddArg(ptr) 16062 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16063 v0.AuxInt = 0 16064 v.AddArg(v0) 16065 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16066 v1.AuxInt = 8 16067 v1.AddArg(ptr) 16068 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16069 v2.AuxInt = 0 16070 v1.AddArg(v2) 16071 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16072 v3.AddArg(ptr) 16073 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16074 v4.AuxInt = 0 16075 v3.AddArg(v4) 16076 v3.AddArg(mem) 16077 
v1.AddArg(v3) 16078 v.AddArg(v1) 16079 return true 16080 } 16081 // match: (Zero [14] ptr mem) 16082 // cond: 16083 // result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 16084 for { 16085 if v.AuxInt != 14 { 16086 break 16087 } 16088 _ = v.Args[1] 16089 ptr := v.Args[0] 16090 mem := v.Args[1] 16091 v.reset(OpARM64MOVHstore) 16092 v.AuxInt = 12 16093 v.AddArg(ptr) 16094 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16095 v0.AuxInt = 0 16096 v.AddArg(v0) 16097 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16098 v1.AuxInt = 8 16099 v1.AddArg(ptr) 16100 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16101 v2.AuxInt = 0 16102 v1.AddArg(v2) 16103 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16104 v3.AddArg(ptr) 16105 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16106 v4.AuxInt = 0 16107 v3.AddArg(v4) 16108 v3.AddArg(mem) 16109 v1.AddArg(v3) 16110 v.AddArg(v1) 16111 return true 16112 } 16113 // match: (Zero [15] ptr mem) 16114 // cond: 16115 // result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))) 16116 for { 16117 if v.AuxInt != 15 { 16118 break 16119 } 16120 _ = v.Args[1] 16121 ptr := v.Args[0] 16122 mem := v.Args[1] 16123 v.reset(OpARM64MOVBstore) 16124 v.AuxInt = 14 16125 v.AddArg(ptr) 16126 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16127 v0.AuxInt = 0 16128 v.AddArg(v0) 16129 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 16130 v1.AuxInt = 12 16131 v1.AddArg(ptr) 16132 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16133 v2.AuxInt = 0 16134 v1.AddArg(v2) 16135 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16136 v3.AuxInt = 8 16137 v3.AddArg(ptr) 16138 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16139 v4.AuxInt = 0 16140 v3.AddArg(v4) 16141 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [16] ptr mem)
	// cond:
	// result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	// 16 bytes: a single store-pair of two zero constants.
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 0
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [32] ptr mem)
	// cond:
	// result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
	// 32 bytes: two chained store-pairs at offsets 16 and 0.
	for {
		if v.AuxInt != 32 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 0
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Zero [48] ptr mem)
	// cond:
	// result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))
	// 48 bytes: three chained store-pairs at offsets 32, 16 and 0.
	for {
		if v.AuxInt != 48 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 32
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 16
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v7.AuxInt = 0
		v5.AddArg(v7)
		v5.AddArg(mem)
		v2.AddArg(v5)
		v.AddArg(v2)
		return true
	}
	// match: (Zero [64] ptr mem)
	// cond:
	// result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))))
	// 64 bytes: four chained store-pairs at offsets 48, 32, 16 and 0.
	for {
		if v.AuxInt != 64 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64STP)
		v.AuxInt = 48
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 32
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v5.AuxInt = 16
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v7.AuxInt = 0
		v5.AddArg(v7)
		v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v8.AuxInt = 0
		v8.AddArg(ptr)
		v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v9.AuxInt = 0
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v10.AuxInt = 0
		v8.AddArg(v10)
		v8.AddArg(mem)
		v5.AddArg(v8)
		v2.AddArg(v5)
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero_20 lowers the remaining generic Zero ops that the
// fixed-size cases above did not catch: unaligned tails are split off, and
// large aligned zeroings become a Duff's-device call or a runtime loop.
func rewriteValueARM64_OpZero_20(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s > 16
	// result: (Zero [s-s%16] (OffPtr <ptr.Type> ptr [s%16]) (Zero [s%16] ptr mem))
	// Peel off the non-16-byte-aligned head, then zero the aligned remainder.
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 != 0 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = s - s%16
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s % 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s % 16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (64 - int64(s/16))] ptr mem)
	// Medium aligned sizes: jump into the Duff's-device zeroing routine.
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		v.AuxInt = 4 * (64 - int64(s/16))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
	// Large aligned sizes (or no Duff's device): emit the generic zeroing loop.
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpZeroExt16to32_0 lowers ZeroExt16to32 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt16to64_0 lowers ZeroExt16to64 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt32to64_0 lowers ZeroExt32to64 to MOVWUreg.
func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to16_0 lowers ZeroExt8to16 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to32_0 lowers ZeroExt8to32 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to64_0 lowers ZeroExt8to64 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteBlockARM64 tries to rewrite the control-flow block b into a cheaper
// equivalent: comparisons against zero become Z/NZ/ZW/NZW blocks, single-bit
// tests become TBZ/TBNZ blocks, constant flag results collapse the block to
// BlockFirst (possibly swapping successors), and InvertFlags on the control
// value flips the branch kind. It reports whether b was changed.
func rewriteBlockARM64(b *Block) bool {
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	typ := &config.Types
	_ = typ
	switch b.Kind {
	case BlockARM64EQ:
		// match: (EQ (CMPconst [0] x) yes no)
		// cond:
		// result: (Z x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64Z
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (CMPWconst [0] x) yes no)
		// cond:
		// result: (ZW x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPWconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64ZW
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (EQ (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (EQ (InvertFlags cmp) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64GE:
		// match: (GE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (GE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (GE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GE (InvertFlags cmp) yes no)
		// cond:
		// result: (LE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64GT:
		// match: (GT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GT (InvertFlags cmp) yes no)
		// cond:
		// result: (LT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockIf:
		// match: (If (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64Equal {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64NotEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (LessThanU cc) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (LessEqualU cc) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (GreaterThanU cc) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If (GreaterEqualU cc) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (If cond yes no)
		// cond:
		// result: (NZ cond yes no)
		// Fallback: branch on the boolean value itself being non-zero.
		for {
			v := b.Control
			_ = v
			cond := b.Control
			b.Kind = BlockARM64NZ
			b.SetControl(cond)
			b.Aux = nil
			return true
		}
	case BlockARM64LE:
		// match: (LE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (LE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (LE (InvertFlags cmp) yes no)
		// cond:
		// result: (GE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64LT:
		// match: (LT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (LT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (LT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (LT (InvertFlags cmp) yes no)
		// cond:
		// result: (GT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64NE:
		// match: (NE (CMPconst [0] x) yes no)
		// cond:
		// result: (NZ x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64NZ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (CMPWconst [0] x) yes no)
		// cond:
		// result: (NZW x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPWconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64NZW
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (NE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (NE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (NE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (NE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (NE (InvertFlags cmp) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64NZ:
		// match: (NZ (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64Equal {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64NotEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (LessThanU cc) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (LessEqualU cc) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (GreaterThanU cc) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (GreaterEqualU cc) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			b.Aux = nil
			return true
		}
		// match: (NZ (ANDconst [c] x) yes no)
		// cond: oneBit(c)
		// result: (TBNZ {ntz(c)} x yes no)
		// Testing a single bit becomes a test-bit-and-branch.
		for {
			v := b.Control
			if v.Op != OpARM64ANDconst {
				break
			}
			c := v.AuxInt
			x := v.Args[0]
			if !(oneBit(c)) {
				break
			}
			b.Kind = BlockARM64TBNZ
			b.SetControl(x)
			b.Aux = ntz(c)
			return true
		}
		// match: (NZ (MOVDconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (NZ (MOVDconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
	case BlockARM64NZW:
		// match: (NZW (ANDconst [c] x) yes no)
		// cond: oneBit(int64(uint32(c)))
		// result: (TBNZ {ntz(int64(uint32(c)))} x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64ANDconst {
				break
			}
			c := v.AuxInt
			x := v.Args[0]
			if !(oneBit(int64(uint32(c)))) {
				break
			}
			b.Kind = BlockARM64TBNZ
			b.SetControl(x)
			b.Aux = ntz(int64(uint32(c)))
			return true
		}
		// match: (NZW (MOVDconst [c]) yes no)
		// cond: int32(c) == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (NZW (MOVDconst [c]) yes no)
		// cond: int32(c) != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
	case BlockARM64UGE:
		// match: (UGE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (UGE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (UGE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (UGE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (UGE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (UGE (InvertFlags cmp) yes no)
		// cond:
		// result: (ULE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64UGT:
		// match: (UGT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (UGT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (UGT (InvertFlags cmp) yes no)
		// cond:
		// result: (ULT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64ULE:
		// match: (ULE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ULE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ULE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (ULE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ULE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (ULE (InvertFlags cmp) yes no)
		// cond:
		// result: (UGE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64ULT:
		// match: (ULT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (ULT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ULT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (ULT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ULT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (ULT (InvertFlags cmp) yes no)
		// cond:
		// result: (UGT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
	case BlockARM64Z:
		// match: (Z (ANDconst [c] x) yes no)
		// cond: oneBit(c)
		// result: (TBZ {ntz(c)} x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64ANDconst {
				break
			}
			c := v.AuxInt
			x := v.Args[0]
			if !(oneBit(c)) {
				break
			}
			b.Kind = BlockARM64TBZ
			b.SetControl(x)
			b.Aux = ntz(c)
			return true
		}
		// match: (Z (MOVDconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (Z (MOVDconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	case BlockARM64ZW:
		// match: (ZW (ANDconst [c] x) yes no)
		// cond: oneBit(int64(uint32(c)))
		// result: (TBZ {ntz(int64(uint32(c)))} x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64ANDconst {
				break
			}
			c := v.AuxInt
			x := v.Args[0]
			if !(oneBit(int64(uint32(c)))) {
				break
			}
			b.Kind = BlockARM64TBZ
			b.SetControl(x)
			b.Aux = ntz(int64(uint32(c)))
			return true
		}
		// match: (ZW (MOVDconst [c]) yes no)
		// cond: int32(c) == 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (ZW (MOVDconst [c]) yes no)
		// cond: int32(c) != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	}
	return false
}