github.com/hlts2/go@v0.0.0-20170904000733-812b34efaed8/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 import "cmd/internal/obj" 8 import "cmd/internal/objabi" 9 import "cmd/compile/internal/types" 10 11 var _ = math.MinInt8 // in case not otherwise used 12 var _ = obj.ANOP // in case not otherwise used 13 var _ = objabi.GOROOT // in case not otherwise used 14 var _ = types.TypeMem // in case not otherwise used 15 16 func rewriteValueARM64(v *Value) bool { 17 switch v.Op { 18 case OpARM64ADD: 19 return rewriteValueARM64_OpARM64ADD_0(v) 20 case OpARM64ADDconst: 21 return rewriteValueARM64_OpARM64ADDconst_0(v) 22 case OpARM64ADDshiftLL: 23 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 24 case OpARM64ADDshiftRA: 25 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 26 case OpARM64ADDshiftRL: 27 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 28 case OpARM64AND: 29 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 30 case OpARM64ANDconst: 31 return rewriteValueARM64_OpARM64ANDconst_0(v) 32 case OpARM64ANDshiftLL: 33 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 34 case OpARM64ANDshiftRA: 35 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 36 case OpARM64ANDshiftRL: 37 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 38 case OpARM64BIC: 39 return rewriteValueARM64_OpARM64BIC_0(v) 40 case OpARM64BICconst: 41 return rewriteValueARM64_OpARM64BICconst_0(v) 42 case OpARM64BICshiftLL: 43 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 44 case OpARM64BICshiftRA: 45 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 46 case OpARM64BICshiftRL: 47 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 48 case OpARM64CMP: 49 return rewriteValueARM64_OpARM64CMP_0(v) 50 case OpARM64CMPW: 51 return rewriteValueARM64_OpARM64CMPW_0(v) 52 case OpARM64CMPWconst: 53 return rewriteValueARM64_OpARM64CMPWconst_0(v) 54 case OpARM64CMPconst: 
55 return rewriteValueARM64_OpARM64CMPconst_0(v) 56 case OpARM64CMPshiftLL: 57 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 58 case OpARM64CMPshiftRA: 59 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 60 case OpARM64CMPshiftRL: 61 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 62 case OpARM64CSELULT: 63 return rewriteValueARM64_OpARM64CSELULT_0(v) 64 case OpARM64CSELULT0: 65 return rewriteValueARM64_OpARM64CSELULT0_0(v) 66 case OpARM64DIV: 67 return rewriteValueARM64_OpARM64DIV_0(v) 68 case OpARM64DIVW: 69 return rewriteValueARM64_OpARM64DIVW_0(v) 70 case OpARM64Equal: 71 return rewriteValueARM64_OpARM64Equal_0(v) 72 case OpARM64FMOVDload: 73 return rewriteValueARM64_OpARM64FMOVDload_0(v) 74 case OpARM64FMOVDstore: 75 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 76 case OpARM64FMOVSload: 77 return rewriteValueARM64_OpARM64FMOVSload_0(v) 78 case OpARM64FMOVSstore: 79 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 80 case OpARM64GreaterEqual: 81 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 82 case OpARM64GreaterEqualU: 83 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 84 case OpARM64GreaterThan: 85 return rewriteValueARM64_OpARM64GreaterThan_0(v) 86 case OpARM64GreaterThanU: 87 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 88 case OpARM64LessEqual: 89 return rewriteValueARM64_OpARM64LessEqual_0(v) 90 case OpARM64LessEqualU: 91 return rewriteValueARM64_OpARM64LessEqualU_0(v) 92 case OpARM64LessThan: 93 return rewriteValueARM64_OpARM64LessThan_0(v) 94 case OpARM64LessThanU: 95 return rewriteValueARM64_OpARM64LessThanU_0(v) 96 case OpARM64MOD: 97 return rewriteValueARM64_OpARM64MOD_0(v) 98 case OpARM64MODW: 99 return rewriteValueARM64_OpARM64MODW_0(v) 100 case OpARM64MOVBUload: 101 return rewriteValueARM64_OpARM64MOVBUload_0(v) 102 case OpARM64MOVBUreg: 103 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 104 case OpARM64MOVBload: 105 return rewriteValueARM64_OpARM64MOVBload_0(v) 106 case OpARM64MOVBreg: 107 return 
rewriteValueARM64_OpARM64MOVBreg_0(v) 108 case OpARM64MOVBstore: 109 return rewriteValueARM64_OpARM64MOVBstore_0(v) 110 case OpARM64MOVBstorezero: 111 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 112 case OpARM64MOVDload: 113 return rewriteValueARM64_OpARM64MOVDload_0(v) 114 case OpARM64MOVDreg: 115 return rewriteValueARM64_OpARM64MOVDreg_0(v) 116 case OpARM64MOVDstore: 117 return rewriteValueARM64_OpARM64MOVDstore_0(v) 118 case OpARM64MOVDstorezero: 119 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 120 case OpARM64MOVHUload: 121 return rewriteValueARM64_OpARM64MOVHUload_0(v) 122 case OpARM64MOVHUreg: 123 return rewriteValueARM64_OpARM64MOVHUreg_0(v) 124 case OpARM64MOVHload: 125 return rewriteValueARM64_OpARM64MOVHload_0(v) 126 case OpARM64MOVHreg: 127 return rewriteValueARM64_OpARM64MOVHreg_0(v) 128 case OpARM64MOVHstore: 129 return rewriteValueARM64_OpARM64MOVHstore_0(v) 130 case OpARM64MOVHstorezero: 131 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 132 case OpARM64MOVQstorezero: 133 return rewriteValueARM64_OpARM64MOVQstorezero_0(v) 134 case OpARM64MOVWUload: 135 return rewriteValueARM64_OpARM64MOVWUload_0(v) 136 case OpARM64MOVWUreg: 137 return rewriteValueARM64_OpARM64MOVWUreg_0(v) 138 case OpARM64MOVWload: 139 return rewriteValueARM64_OpARM64MOVWload_0(v) 140 case OpARM64MOVWreg: 141 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 142 case OpARM64MOVWstore: 143 return rewriteValueARM64_OpARM64MOVWstore_0(v) 144 case OpARM64MOVWstorezero: 145 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 146 case OpARM64MUL: 147 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 148 case OpARM64MULW: 149 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 150 case OpARM64MVN: 151 return rewriteValueARM64_OpARM64MVN_0(v) 152 case OpARM64NEG: 153 return rewriteValueARM64_OpARM64NEG_0(v) 
154 case OpARM64NotEqual: 155 return rewriteValueARM64_OpARM64NotEqual_0(v) 156 case OpARM64OR: 157 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) 158 case OpARM64ORconst: 159 return rewriteValueARM64_OpARM64ORconst_0(v) 160 case OpARM64ORshiftLL: 161 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) 162 case OpARM64ORshiftRA: 163 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 164 case OpARM64ORshiftRL: 165 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 166 case OpARM64SLL: 167 return rewriteValueARM64_OpARM64SLL_0(v) 168 case OpARM64SLLconst: 169 return rewriteValueARM64_OpARM64SLLconst_0(v) 170 case OpARM64SRA: 171 return rewriteValueARM64_OpARM64SRA_0(v) 172 case OpARM64SRAconst: 173 return rewriteValueARM64_OpARM64SRAconst_0(v) 174 case OpARM64SRL: 175 return rewriteValueARM64_OpARM64SRL_0(v) 176 case OpARM64SRLconst: 177 return rewriteValueARM64_OpARM64SRLconst_0(v) 178 case OpARM64STP: 179 return rewriteValueARM64_OpARM64STP_0(v) 180 case OpARM64SUB: 181 return rewriteValueARM64_OpARM64SUB_0(v) 182 case OpARM64SUBconst: 183 return rewriteValueARM64_OpARM64SUBconst_0(v) 184 case OpARM64SUBshiftLL: 185 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 186 case OpARM64SUBshiftRA: 187 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 188 case OpARM64SUBshiftRL: 189 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 190 case OpARM64UDIV: 191 return rewriteValueARM64_OpARM64UDIV_0(v) 192 case OpARM64UDIVW: 193 return rewriteValueARM64_OpARM64UDIVW_0(v) 194 case OpARM64UMOD: 195 return rewriteValueARM64_OpARM64UMOD_0(v) 196 case OpARM64UMODW: 197 return rewriteValueARM64_OpARM64UMODW_0(v) 198 case OpARM64XOR: 199 return rewriteValueARM64_OpARM64XOR_0(v) 200 case OpARM64XORconst: 201 return rewriteValueARM64_OpARM64XORconst_0(v) 202 case OpARM64XORshiftLL: 203 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 204 case OpARM64XORshiftRA: 205 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 206 case 
OpARM64XORshiftRL: 207 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 208 case OpAdd16: 209 return rewriteValueARM64_OpAdd16_0(v) 210 case OpAdd32: 211 return rewriteValueARM64_OpAdd32_0(v) 212 case OpAdd32F: 213 return rewriteValueARM64_OpAdd32F_0(v) 214 case OpAdd64: 215 return rewriteValueARM64_OpAdd64_0(v) 216 case OpAdd64F: 217 return rewriteValueARM64_OpAdd64F_0(v) 218 case OpAdd8: 219 return rewriteValueARM64_OpAdd8_0(v) 220 case OpAddPtr: 221 return rewriteValueARM64_OpAddPtr_0(v) 222 case OpAddr: 223 return rewriteValueARM64_OpAddr_0(v) 224 case OpAnd16: 225 return rewriteValueARM64_OpAnd16_0(v) 226 case OpAnd32: 227 return rewriteValueARM64_OpAnd32_0(v) 228 case OpAnd64: 229 return rewriteValueARM64_OpAnd64_0(v) 230 case OpAnd8: 231 return rewriteValueARM64_OpAnd8_0(v) 232 case OpAndB: 233 return rewriteValueARM64_OpAndB_0(v) 234 case OpAtomicAdd32: 235 return rewriteValueARM64_OpAtomicAdd32_0(v) 236 case OpAtomicAdd64: 237 return rewriteValueARM64_OpAtomicAdd64_0(v) 238 case OpAtomicAnd8: 239 return rewriteValueARM64_OpAtomicAnd8_0(v) 240 case OpAtomicCompareAndSwap32: 241 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 242 case OpAtomicCompareAndSwap64: 243 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 244 case OpAtomicExchange32: 245 return rewriteValueARM64_OpAtomicExchange32_0(v) 246 case OpAtomicExchange64: 247 return rewriteValueARM64_OpAtomicExchange64_0(v) 248 case OpAtomicLoad32: 249 return rewriteValueARM64_OpAtomicLoad32_0(v) 250 case OpAtomicLoad64: 251 return rewriteValueARM64_OpAtomicLoad64_0(v) 252 case OpAtomicLoadPtr: 253 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 254 case OpAtomicOr8: 255 return rewriteValueARM64_OpAtomicOr8_0(v) 256 case OpAtomicStore32: 257 return rewriteValueARM64_OpAtomicStore32_0(v) 258 case OpAtomicStore64: 259 return rewriteValueARM64_OpAtomicStore64_0(v) 260 case OpAtomicStorePtrNoWB: 261 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 262 case OpAvg64u: 263 return 
rewriteValueARM64_OpAvg64u_0(v) 264 case OpBitLen64: 265 return rewriteValueARM64_OpBitLen64_0(v) 266 case OpBitRev16: 267 return rewriteValueARM64_OpBitRev16_0(v) 268 case OpBitRev32: 269 return rewriteValueARM64_OpBitRev32_0(v) 270 case OpBitRev64: 271 return rewriteValueARM64_OpBitRev64_0(v) 272 case OpBitRev8: 273 return rewriteValueARM64_OpBitRev8_0(v) 274 case OpBswap32: 275 return rewriteValueARM64_OpBswap32_0(v) 276 case OpBswap64: 277 return rewriteValueARM64_OpBswap64_0(v) 278 case OpClosureCall: 279 return rewriteValueARM64_OpClosureCall_0(v) 280 case OpCom16: 281 return rewriteValueARM64_OpCom16_0(v) 282 case OpCom32: 283 return rewriteValueARM64_OpCom32_0(v) 284 case OpCom64: 285 return rewriteValueARM64_OpCom64_0(v) 286 case OpCom8: 287 return rewriteValueARM64_OpCom8_0(v) 288 case OpConst16: 289 return rewriteValueARM64_OpConst16_0(v) 290 case OpConst32: 291 return rewriteValueARM64_OpConst32_0(v) 292 case OpConst32F: 293 return rewriteValueARM64_OpConst32F_0(v) 294 case OpConst64: 295 return rewriteValueARM64_OpConst64_0(v) 296 case OpConst64F: 297 return rewriteValueARM64_OpConst64F_0(v) 298 case OpConst8: 299 return rewriteValueARM64_OpConst8_0(v) 300 case OpConstBool: 301 return rewriteValueARM64_OpConstBool_0(v) 302 case OpConstNil: 303 return rewriteValueARM64_OpConstNil_0(v) 304 case OpConvert: 305 return rewriteValueARM64_OpConvert_0(v) 306 case OpCtz32: 307 return rewriteValueARM64_OpCtz32_0(v) 308 case OpCtz64: 309 return rewriteValueARM64_OpCtz64_0(v) 310 case OpCvt32Fto32: 311 return rewriteValueARM64_OpCvt32Fto32_0(v) 312 case OpCvt32Fto32U: 313 return rewriteValueARM64_OpCvt32Fto32U_0(v) 314 case OpCvt32Fto64: 315 return rewriteValueARM64_OpCvt32Fto64_0(v) 316 case OpCvt32Fto64F: 317 return rewriteValueARM64_OpCvt32Fto64F_0(v) 318 case OpCvt32Fto64U: 319 return rewriteValueARM64_OpCvt32Fto64U_0(v) 320 case OpCvt32Uto32F: 321 return rewriteValueARM64_OpCvt32Uto32F_0(v) 322 case OpCvt32Uto64F: 323 return 
rewriteValueARM64_OpCvt32Uto64F_0(v) 324 case OpCvt32to32F: 325 return rewriteValueARM64_OpCvt32to32F_0(v) 326 case OpCvt32to64F: 327 return rewriteValueARM64_OpCvt32to64F_0(v) 328 case OpCvt64Fto32: 329 return rewriteValueARM64_OpCvt64Fto32_0(v) 330 case OpCvt64Fto32F: 331 return rewriteValueARM64_OpCvt64Fto32F_0(v) 332 case OpCvt64Fto32U: 333 return rewriteValueARM64_OpCvt64Fto32U_0(v) 334 case OpCvt64Fto64: 335 return rewriteValueARM64_OpCvt64Fto64_0(v) 336 case OpCvt64Fto64U: 337 return rewriteValueARM64_OpCvt64Fto64U_0(v) 338 case OpCvt64Uto32F: 339 return rewriteValueARM64_OpCvt64Uto32F_0(v) 340 case OpCvt64Uto64F: 341 return rewriteValueARM64_OpCvt64Uto64F_0(v) 342 case OpCvt64to32F: 343 return rewriteValueARM64_OpCvt64to32F_0(v) 344 case OpCvt64to64F: 345 return rewriteValueARM64_OpCvt64to64F_0(v) 346 case OpDiv16: 347 return rewriteValueARM64_OpDiv16_0(v) 348 case OpDiv16u: 349 return rewriteValueARM64_OpDiv16u_0(v) 350 case OpDiv32: 351 return rewriteValueARM64_OpDiv32_0(v) 352 case OpDiv32F: 353 return rewriteValueARM64_OpDiv32F_0(v) 354 case OpDiv32u: 355 return rewriteValueARM64_OpDiv32u_0(v) 356 case OpDiv64: 357 return rewriteValueARM64_OpDiv64_0(v) 358 case OpDiv64F: 359 return rewriteValueARM64_OpDiv64F_0(v) 360 case OpDiv64u: 361 return rewriteValueARM64_OpDiv64u_0(v) 362 case OpDiv8: 363 return rewriteValueARM64_OpDiv8_0(v) 364 case OpDiv8u: 365 return rewriteValueARM64_OpDiv8u_0(v) 366 case OpEq16: 367 return rewriteValueARM64_OpEq16_0(v) 368 case OpEq32: 369 return rewriteValueARM64_OpEq32_0(v) 370 case OpEq32F: 371 return rewriteValueARM64_OpEq32F_0(v) 372 case OpEq64: 373 return rewriteValueARM64_OpEq64_0(v) 374 case OpEq64F: 375 return rewriteValueARM64_OpEq64F_0(v) 376 case OpEq8: 377 return rewriteValueARM64_OpEq8_0(v) 378 case OpEqB: 379 return rewriteValueARM64_OpEqB_0(v) 380 case OpEqPtr: 381 return rewriteValueARM64_OpEqPtr_0(v) 382 case OpGeq16: 383 return rewriteValueARM64_OpGeq16_0(v) 384 case OpGeq16U: 385 return 
rewriteValueARM64_OpGeq16U_0(v) 386 case OpGeq32: 387 return rewriteValueARM64_OpGeq32_0(v) 388 case OpGeq32F: 389 return rewriteValueARM64_OpGeq32F_0(v) 390 case OpGeq32U: 391 return rewriteValueARM64_OpGeq32U_0(v) 392 case OpGeq64: 393 return rewriteValueARM64_OpGeq64_0(v) 394 case OpGeq64F: 395 return rewriteValueARM64_OpGeq64F_0(v) 396 case OpGeq64U: 397 return rewriteValueARM64_OpGeq64U_0(v) 398 case OpGeq8: 399 return rewriteValueARM64_OpGeq8_0(v) 400 case OpGeq8U: 401 return rewriteValueARM64_OpGeq8U_0(v) 402 case OpGetClosurePtr: 403 return rewriteValueARM64_OpGetClosurePtr_0(v) 404 case OpGreater16: 405 return rewriteValueARM64_OpGreater16_0(v) 406 case OpGreater16U: 407 return rewriteValueARM64_OpGreater16U_0(v) 408 case OpGreater32: 409 return rewriteValueARM64_OpGreater32_0(v) 410 case OpGreater32F: 411 return rewriteValueARM64_OpGreater32F_0(v) 412 case OpGreater32U: 413 return rewriteValueARM64_OpGreater32U_0(v) 414 case OpGreater64: 415 return rewriteValueARM64_OpGreater64_0(v) 416 case OpGreater64F: 417 return rewriteValueARM64_OpGreater64F_0(v) 418 case OpGreater64U: 419 return rewriteValueARM64_OpGreater64U_0(v) 420 case OpGreater8: 421 return rewriteValueARM64_OpGreater8_0(v) 422 case OpGreater8U: 423 return rewriteValueARM64_OpGreater8U_0(v) 424 case OpHmul32: 425 return rewriteValueARM64_OpHmul32_0(v) 426 case OpHmul32u: 427 return rewriteValueARM64_OpHmul32u_0(v) 428 case OpHmul64: 429 return rewriteValueARM64_OpHmul64_0(v) 430 case OpHmul64u: 431 return rewriteValueARM64_OpHmul64u_0(v) 432 case OpInterCall: 433 return rewriteValueARM64_OpInterCall_0(v) 434 case OpIsInBounds: 435 return rewriteValueARM64_OpIsInBounds_0(v) 436 case OpIsNonNil: 437 return rewriteValueARM64_OpIsNonNil_0(v) 438 case OpIsSliceInBounds: 439 return rewriteValueARM64_OpIsSliceInBounds_0(v) 440 case OpLeq16: 441 return rewriteValueARM64_OpLeq16_0(v) 442 case OpLeq16U: 443 return rewriteValueARM64_OpLeq16U_0(v) 444 case OpLeq32: 445 return rewriteValueARM64_OpLeq32_0(v) 
446 case OpLeq32F: 447 return rewriteValueARM64_OpLeq32F_0(v) 448 case OpLeq32U: 449 return rewriteValueARM64_OpLeq32U_0(v) 450 case OpLeq64: 451 return rewriteValueARM64_OpLeq64_0(v) 452 case OpLeq64F: 453 return rewriteValueARM64_OpLeq64F_0(v) 454 case OpLeq64U: 455 return rewriteValueARM64_OpLeq64U_0(v) 456 case OpLeq8: 457 return rewriteValueARM64_OpLeq8_0(v) 458 case OpLeq8U: 459 return rewriteValueARM64_OpLeq8U_0(v) 460 case OpLess16: 461 return rewriteValueARM64_OpLess16_0(v) 462 case OpLess16U: 463 return rewriteValueARM64_OpLess16U_0(v) 464 case OpLess32: 465 return rewriteValueARM64_OpLess32_0(v) 466 case OpLess32F: 467 return rewriteValueARM64_OpLess32F_0(v) 468 case OpLess32U: 469 return rewriteValueARM64_OpLess32U_0(v) 470 case OpLess64: 471 return rewriteValueARM64_OpLess64_0(v) 472 case OpLess64F: 473 return rewriteValueARM64_OpLess64F_0(v) 474 case OpLess64U: 475 return rewriteValueARM64_OpLess64U_0(v) 476 case OpLess8: 477 return rewriteValueARM64_OpLess8_0(v) 478 case OpLess8U: 479 return rewriteValueARM64_OpLess8U_0(v) 480 case OpLoad: 481 return rewriteValueARM64_OpLoad_0(v) 482 case OpLsh16x16: 483 return rewriteValueARM64_OpLsh16x16_0(v) 484 case OpLsh16x32: 485 return rewriteValueARM64_OpLsh16x32_0(v) 486 case OpLsh16x64: 487 return rewriteValueARM64_OpLsh16x64_0(v) 488 case OpLsh16x8: 489 return rewriteValueARM64_OpLsh16x8_0(v) 490 case OpLsh32x16: 491 return rewriteValueARM64_OpLsh32x16_0(v) 492 case OpLsh32x32: 493 return rewriteValueARM64_OpLsh32x32_0(v) 494 case OpLsh32x64: 495 return rewriteValueARM64_OpLsh32x64_0(v) 496 case OpLsh32x8: 497 return rewriteValueARM64_OpLsh32x8_0(v) 498 case OpLsh64x16: 499 return rewriteValueARM64_OpLsh64x16_0(v) 500 case OpLsh64x32: 501 return rewriteValueARM64_OpLsh64x32_0(v) 502 case OpLsh64x64: 503 return rewriteValueARM64_OpLsh64x64_0(v) 504 case OpLsh64x8: 505 return rewriteValueARM64_OpLsh64x8_0(v) 506 case OpLsh8x16: 507 return rewriteValueARM64_OpLsh8x16_0(v) 508 case OpLsh8x32: 509 return 
rewriteValueARM64_OpLsh8x32_0(v) 510 case OpLsh8x64: 511 return rewriteValueARM64_OpLsh8x64_0(v) 512 case OpLsh8x8: 513 return rewriteValueARM64_OpLsh8x8_0(v) 514 case OpMod16: 515 return rewriteValueARM64_OpMod16_0(v) 516 case OpMod16u: 517 return rewriteValueARM64_OpMod16u_0(v) 518 case OpMod32: 519 return rewriteValueARM64_OpMod32_0(v) 520 case OpMod32u: 521 return rewriteValueARM64_OpMod32u_0(v) 522 case OpMod64: 523 return rewriteValueARM64_OpMod64_0(v) 524 case OpMod64u: 525 return rewriteValueARM64_OpMod64u_0(v) 526 case OpMod8: 527 return rewriteValueARM64_OpMod8_0(v) 528 case OpMod8u: 529 return rewriteValueARM64_OpMod8u_0(v) 530 case OpMove: 531 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 532 case OpMul16: 533 return rewriteValueARM64_OpMul16_0(v) 534 case OpMul32: 535 return rewriteValueARM64_OpMul32_0(v) 536 case OpMul32F: 537 return rewriteValueARM64_OpMul32F_0(v) 538 case OpMul64: 539 return rewriteValueARM64_OpMul64_0(v) 540 case OpMul64F: 541 return rewriteValueARM64_OpMul64F_0(v) 542 case OpMul8: 543 return rewriteValueARM64_OpMul8_0(v) 544 case OpNeg16: 545 return rewriteValueARM64_OpNeg16_0(v) 546 case OpNeg32: 547 return rewriteValueARM64_OpNeg32_0(v) 548 case OpNeg32F: 549 return rewriteValueARM64_OpNeg32F_0(v) 550 case OpNeg64: 551 return rewriteValueARM64_OpNeg64_0(v) 552 case OpNeg64F: 553 return rewriteValueARM64_OpNeg64F_0(v) 554 case OpNeg8: 555 return rewriteValueARM64_OpNeg8_0(v) 556 case OpNeq16: 557 return rewriteValueARM64_OpNeq16_0(v) 558 case OpNeq32: 559 return rewriteValueARM64_OpNeq32_0(v) 560 case OpNeq32F: 561 return rewriteValueARM64_OpNeq32F_0(v) 562 case OpNeq64: 563 return rewriteValueARM64_OpNeq64_0(v) 564 case OpNeq64F: 565 return rewriteValueARM64_OpNeq64F_0(v) 566 case OpNeq8: 567 return rewriteValueARM64_OpNeq8_0(v) 568 case OpNeqB: 569 return rewriteValueARM64_OpNeqB_0(v) 570 case OpNeqPtr: 571 return rewriteValueARM64_OpNeqPtr_0(v) 572 case OpNilCheck: 573 return 
rewriteValueARM64_OpNilCheck_0(v) 574 case OpNot: 575 return rewriteValueARM64_OpNot_0(v) 576 case OpOffPtr: 577 return rewriteValueARM64_OpOffPtr_0(v) 578 case OpOr16: 579 return rewriteValueARM64_OpOr16_0(v) 580 case OpOr32: 581 return rewriteValueARM64_OpOr32_0(v) 582 case OpOr64: 583 return rewriteValueARM64_OpOr64_0(v) 584 case OpOr8: 585 return rewriteValueARM64_OpOr8_0(v) 586 case OpOrB: 587 return rewriteValueARM64_OpOrB_0(v) 588 case OpRound32F: 589 return rewriteValueARM64_OpRound32F_0(v) 590 case OpRound64F: 591 return rewriteValueARM64_OpRound64F_0(v) 592 case OpRsh16Ux16: 593 return rewriteValueARM64_OpRsh16Ux16_0(v) 594 case OpRsh16Ux32: 595 return rewriteValueARM64_OpRsh16Ux32_0(v) 596 case OpRsh16Ux64: 597 return rewriteValueARM64_OpRsh16Ux64_0(v) 598 case OpRsh16Ux8: 599 return rewriteValueARM64_OpRsh16Ux8_0(v) 600 case OpRsh16x16: 601 return rewriteValueARM64_OpRsh16x16_0(v) 602 case OpRsh16x32: 603 return rewriteValueARM64_OpRsh16x32_0(v) 604 case OpRsh16x64: 605 return rewriteValueARM64_OpRsh16x64_0(v) 606 case OpRsh16x8: 607 return rewriteValueARM64_OpRsh16x8_0(v) 608 case OpRsh32Ux16: 609 return rewriteValueARM64_OpRsh32Ux16_0(v) 610 case OpRsh32Ux32: 611 return rewriteValueARM64_OpRsh32Ux32_0(v) 612 case OpRsh32Ux64: 613 return rewriteValueARM64_OpRsh32Ux64_0(v) 614 case OpRsh32Ux8: 615 return rewriteValueARM64_OpRsh32Ux8_0(v) 616 case OpRsh32x16: 617 return rewriteValueARM64_OpRsh32x16_0(v) 618 case OpRsh32x32: 619 return rewriteValueARM64_OpRsh32x32_0(v) 620 case OpRsh32x64: 621 return rewriteValueARM64_OpRsh32x64_0(v) 622 case OpRsh32x8: 623 return rewriteValueARM64_OpRsh32x8_0(v) 624 case OpRsh64Ux16: 625 return rewriteValueARM64_OpRsh64Ux16_0(v) 626 case OpRsh64Ux32: 627 return rewriteValueARM64_OpRsh64Ux32_0(v) 628 case OpRsh64Ux64: 629 return rewriteValueARM64_OpRsh64Ux64_0(v) 630 case OpRsh64Ux8: 631 return rewriteValueARM64_OpRsh64Ux8_0(v) 632 case OpRsh64x16: 633 return rewriteValueARM64_OpRsh64x16_0(v) 634 case OpRsh64x32: 635 
return rewriteValueARM64_OpRsh64x32_0(v) 636 case OpRsh64x64: 637 return rewriteValueARM64_OpRsh64x64_0(v) 638 case OpRsh64x8: 639 return rewriteValueARM64_OpRsh64x8_0(v) 640 case OpRsh8Ux16: 641 return rewriteValueARM64_OpRsh8Ux16_0(v) 642 case OpRsh8Ux32: 643 return rewriteValueARM64_OpRsh8Ux32_0(v) 644 case OpRsh8Ux64: 645 return rewriteValueARM64_OpRsh8Ux64_0(v) 646 case OpRsh8Ux8: 647 return rewriteValueARM64_OpRsh8Ux8_0(v) 648 case OpRsh8x16: 649 return rewriteValueARM64_OpRsh8x16_0(v) 650 case OpRsh8x32: 651 return rewriteValueARM64_OpRsh8x32_0(v) 652 case OpRsh8x64: 653 return rewriteValueARM64_OpRsh8x64_0(v) 654 case OpRsh8x8: 655 return rewriteValueARM64_OpRsh8x8_0(v) 656 case OpSignExt16to32: 657 return rewriteValueARM64_OpSignExt16to32_0(v) 658 case OpSignExt16to64: 659 return rewriteValueARM64_OpSignExt16to64_0(v) 660 case OpSignExt32to64: 661 return rewriteValueARM64_OpSignExt32to64_0(v) 662 case OpSignExt8to16: 663 return rewriteValueARM64_OpSignExt8to16_0(v) 664 case OpSignExt8to32: 665 return rewriteValueARM64_OpSignExt8to32_0(v) 666 case OpSignExt8to64: 667 return rewriteValueARM64_OpSignExt8to64_0(v) 668 case OpSlicemask: 669 return rewriteValueARM64_OpSlicemask_0(v) 670 case OpSqrt: 671 return rewriteValueARM64_OpSqrt_0(v) 672 case OpStaticCall: 673 return rewriteValueARM64_OpStaticCall_0(v) 674 case OpStore: 675 return rewriteValueARM64_OpStore_0(v) 676 case OpSub16: 677 return rewriteValueARM64_OpSub16_0(v) 678 case OpSub32: 679 return rewriteValueARM64_OpSub32_0(v) 680 case OpSub32F: 681 return rewriteValueARM64_OpSub32F_0(v) 682 case OpSub64: 683 return rewriteValueARM64_OpSub64_0(v) 684 case OpSub64F: 685 return rewriteValueARM64_OpSub64F_0(v) 686 case OpSub8: 687 return rewriteValueARM64_OpSub8_0(v) 688 case OpSubPtr: 689 return rewriteValueARM64_OpSubPtr_0(v) 690 case OpTrunc16to8: 691 return rewriteValueARM64_OpTrunc16to8_0(v) 692 case OpTrunc32to16: 693 return rewriteValueARM64_OpTrunc32to16_0(v) 694 case OpTrunc32to8: 695 return 
rewriteValueARM64_OpTrunc32to8_0(v) 696 case OpTrunc64to16: 697 return rewriteValueARM64_OpTrunc64to16_0(v) 698 case OpTrunc64to32: 699 return rewriteValueARM64_OpTrunc64to32_0(v) 700 case OpTrunc64to8: 701 return rewriteValueARM64_OpTrunc64to8_0(v) 702 case OpXor16: 703 return rewriteValueARM64_OpXor16_0(v) 704 case OpXor32: 705 return rewriteValueARM64_OpXor32_0(v) 706 case OpXor64: 707 return rewriteValueARM64_OpXor64_0(v) 708 case OpXor8: 709 return rewriteValueARM64_OpXor8_0(v) 710 case OpZero: 711 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 712 case OpZeroExt16to32: 713 return rewriteValueARM64_OpZeroExt16to32_0(v) 714 case OpZeroExt16to64: 715 return rewriteValueARM64_OpZeroExt16to64_0(v) 716 case OpZeroExt32to64: 717 return rewriteValueARM64_OpZeroExt32to64_0(v) 718 case OpZeroExt8to16: 719 return rewriteValueARM64_OpZeroExt8to16_0(v) 720 case OpZeroExt8to32: 721 return rewriteValueARM64_OpZeroExt8to32_0(v) 722 case OpZeroExt8to64: 723 return rewriteValueARM64_OpZeroExt8to64_0(v) 724 } 725 return false 726 } 727 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 728 // match: (ADD x (MOVDconst [c])) 729 // cond: 730 // result: (ADDconst [c] x) 731 for { 732 _ = v.Args[1] 733 x := v.Args[0] 734 v_1 := v.Args[1] 735 if v_1.Op != OpARM64MOVDconst { 736 break 737 } 738 c := v_1.AuxInt 739 v.reset(OpARM64ADDconst) 740 v.AuxInt = c 741 v.AddArg(x) 742 return true 743 } 744 // match: (ADD (MOVDconst [c]) x) 745 // cond: 746 // result: (ADDconst [c] x) 747 for { 748 _ = v.Args[1] 749 v_0 := v.Args[0] 750 if v_0.Op != OpARM64MOVDconst { 751 break 752 } 753 c := v_0.AuxInt 754 x := v.Args[1] 755 v.reset(OpARM64ADDconst) 756 v.AuxInt = c 757 v.AddArg(x) 758 return true 759 } 760 // match: (ADD x (NEG y)) 761 // cond: 762 // result: (SUB x y) 763 for { 764 _ = v.Args[1] 765 x := v.Args[0] 766 v_1 := v.Args[1] 767 if v_1.Op != OpARM64NEG { 768 break 769 } 770 y := v_1.Args[0] 771 v.reset(OpARM64SUB) 772 
v.AddArg(x) 773 v.AddArg(y) 774 return true 775 } 776 // match: (ADD (NEG y) x) 777 // cond: 778 // result: (SUB x y) 779 for { 780 _ = v.Args[1] 781 v_0 := v.Args[0] 782 if v_0.Op != OpARM64NEG { 783 break 784 } 785 y := v_0.Args[0] 786 x := v.Args[1] 787 v.reset(OpARM64SUB) 788 v.AddArg(x) 789 v.AddArg(y) 790 return true 791 } 792 // match: (ADD x (SLLconst [c] y)) 793 // cond: 794 // result: (ADDshiftLL x y [c]) 795 for { 796 _ = v.Args[1] 797 x := v.Args[0] 798 v_1 := v.Args[1] 799 if v_1.Op != OpARM64SLLconst { 800 break 801 } 802 c := v_1.AuxInt 803 y := v_1.Args[0] 804 v.reset(OpARM64ADDshiftLL) 805 v.AuxInt = c 806 v.AddArg(x) 807 v.AddArg(y) 808 return true 809 } 810 // match: (ADD (SLLconst [c] y) x) 811 // cond: 812 // result: (ADDshiftLL x y [c]) 813 for { 814 _ = v.Args[1] 815 v_0 := v.Args[0] 816 if v_0.Op != OpARM64SLLconst { 817 break 818 } 819 c := v_0.AuxInt 820 y := v_0.Args[0] 821 x := v.Args[1] 822 v.reset(OpARM64ADDshiftLL) 823 v.AuxInt = c 824 v.AddArg(x) 825 v.AddArg(y) 826 return true 827 } 828 // match: (ADD x (SRLconst [c] y)) 829 // cond: 830 // result: (ADDshiftRL x y [c]) 831 for { 832 _ = v.Args[1] 833 x := v.Args[0] 834 v_1 := v.Args[1] 835 if v_1.Op != OpARM64SRLconst { 836 break 837 } 838 c := v_1.AuxInt 839 y := v_1.Args[0] 840 v.reset(OpARM64ADDshiftRL) 841 v.AuxInt = c 842 v.AddArg(x) 843 v.AddArg(y) 844 return true 845 } 846 // match: (ADD (SRLconst [c] y) x) 847 // cond: 848 // result: (ADDshiftRL x y [c]) 849 for { 850 _ = v.Args[1] 851 v_0 := v.Args[0] 852 if v_0.Op != OpARM64SRLconst { 853 break 854 } 855 c := v_0.AuxInt 856 y := v_0.Args[0] 857 x := v.Args[1] 858 v.reset(OpARM64ADDshiftRL) 859 v.AuxInt = c 860 v.AddArg(x) 861 v.AddArg(y) 862 return true 863 } 864 // match: (ADD x (SRAconst [c] y)) 865 // cond: 866 // result: (ADDshiftRA x y [c]) 867 for { 868 _ = v.Args[1] 869 x := v.Args[0] 870 v_1 := v.Args[1] 871 if v_1.Op != OpARM64SRAconst { 872 break 873 } 874 c := v_1.AuxInt 875 y := v_1.Args[0] 876 
v.reset(OpARM64ADDshiftRA) 877 v.AuxInt = c 878 v.AddArg(x) 879 v.AddArg(y) 880 return true 881 } 882 // match: (ADD (SRAconst [c] y) x) 883 // cond: 884 // result: (ADDshiftRA x y [c]) 885 for { 886 _ = v.Args[1] 887 v_0 := v.Args[0] 888 if v_0.Op != OpARM64SRAconst { 889 break 890 } 891 c := v_0.AuxInt 892 y := v_0.Args[0] 893 x := v.Args[1] 894 v.reset(OpARM64ADDshiftRA) 895 v.AuxInt = c 896 v.AddArg(x) 897 v.AddArg(y) 898 return true 899 } 900 return false 901 } 902 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 903 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 904 // cond: 905 // result: (MOVDaddr [off1+off2] {sym} ptr) 906 for { 907 off1 := v.AuxInt 908 v_0 := v.Args[0] 909 if v_0.Op != OpARM64MOVDaddr { 910 break 911 } 912 off2 := v_0.AuxInt 913 sym := v_0.Aux 914 ptr := v_0.Args[0] 915 v.reset(OpARM64MOVDaddr) 916 v.AuxInt = off1 + off2 917 v.Aux = sym 918 v.AddArg(ptr) 919 return true 920 } 921 // match: (ADDconst [0] x) 922 // cond: 923 // result: x 924 for { 925 if v.AuxInt != 0 { 926 break 927 } 928 x := v.Args[0] 929 v.reset(OpCopy) 930 v.Type = x.Type 931 v.AddArg(x) 932 return true 933 } 934 // match: (ADDconst [c] (MOVDconst [d])) 935 // cond: 936 // result: (MOVDconst [c+d]) 937 for { 938 c := v.AuxInt 939 v_0 := v.Args[0] 940 if v_0.Op != OpARM64MOVDconst { 941 break 942 } 943 d := v_0.AuxInt 944 v.reset(OpARM64MOVDconst) 945 v.AuxInt = c + d 946 return true 947 } 948 // match: (ADDconst [c] (ADDconst [d] x)) 949 // cond: 950 // result: (ADDconst [c+d] x) 951 for { 952 c := v.AuxInt 953 v_0 := v.Args[0] 954 if v_0.Op != OpARM64ADDconst { 955 break 956 } 957 d := v_0.AuxInt 958 x := v_0.Args[0] 959 v.reset(OpARM64ADDconst) 960 v.AuxInt = c + d 961 v.AddArg(x) 962 return true 963 } 964 // match: (ADDconst [c] (SUBconst [d] x)) 965 // cond: 966 // result: (ADDconst [c-d] x) 967 for { 968 c := v.AuxInt 969 v_0 := v.Args[0] 970 if v_0.Op != OpARM64SUBconst { 971 break 972 } 973 d := v_0.AuxInt 974 x := v_0.Args[0] 975 
v.reset(OpARM64ADDconst) 976 v.AuxInt = c - d 977 v.AddArg(x) 978 return true 979 } 980 return false 981 } 982 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 983 b := v.Block 984 _ = b 985 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 986 // cond: 987 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 988 for { 989 d := v.AuxInt 990 _ = v.Args[1] 991 v_0 := v.Args[0] 992 if v_0.Op != OpARM64MOVDconst { 993 break 994 } 995 c := v_0.AuxInt 996 x := v.Args[1] 997 v.reset(OpARM64ADDconst) 998 v.AuxInt = c 999 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1000 v0.AuxInt = d 1001 v0.AddArg(x) 1002 v.AddArg(v0) 1003 return true 1004 } 1005 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 1006 // cond: 1007 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 1008 for { 1009 d := v.AuxInt 1010 _ = v.Args[1] 1011 x := v.Args[0] 1012 v_1 := v.Args[1] 1013 if v_1.Op != OpARM64MOVDconst { 1014 break 1015 } 1016 c := v_1.AuxInt 1017 v.reset(OpARM64ADDconst) 1018 v.AuxInt = int64(uint64(c) << uint64(d)) 1019 v.AddArg(x) 1020 return true 1021 } 1022 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 1023 // cond: 1024 // result: (RORconst [64-c] x) 1025 for { 1026 c := v.AuxInt 1027 _ = v.Args[1] 1028 v_0 := v.Args[0] 1029 if v_0.Op != OpARM64SRLconst { 1030 break 1031 } 1032 if v_0.AuxInt != 64-c { 1033 break 1034 } 1035 x := v_0.Args[0] 1036 if x != v.Args[1] { 1037 break 1038 } 1039 v.reset(OpARM64RORconst) 1040 v.AuxInt = 64 - c 1041 v.AddArg(x) 1042 return true 1043 } 1044 // match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 1045 // cond: c < 32 && t.Size() == 4 1046 // result: (RORWconst [32-c] x) 1047 for { 1048 t := v.Type 1049 c := v.AuxInt 1050 _ = v.Args[1] 1051 v_0 := v.Args[0] 1052 if v_0.Op != OpARM64SRLconst { 1053 break 1054 } 1055 if v_0.AuxInt != 32-c { 1056 break 1057 } 1058 v_0_0 := v_0.Args[0] 1059 if v_0_0.Op != OpARM64MOVWUreg { 1060 break 1061 } 1062 x := v_0_0.Args[0] 1063 if x != v.Args[1] { 1064 break 1065 } 1066 if !(c < 32 && 
t.Size() == 4) { 1067 break 1068 } 1069 v.reset(OpARM64RORWconst) 1070 v.AuxInt = 32 - c 1071 v.AddArg(x) 1072 return true 1073 } 1074 return false 1075 } 1076 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 1077 b := v.Block 1078 _ = b 1079 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 1080 // cond: 1081 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 1082 for { 1083 d := v.AuxInt 1084 _ = v.Args[1] 1085 v_0 := v.Args[0] 1086 if v_0.Op != OpARM64MOVDconst { 1087 break 1088 } 1089 c := v_0.AuxInt 1090 x := v.Args[1] 1091 v.reset(OpARM64ADDconst) 1092 v.AuxInt = c 1093 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1094 v0.AuxInt = d 1095 v0.AddArg(x) 1096 v.AddArg(v0) 1097 return true 1098 } 1099 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 1100 // cond: 1101 // result: (ADDconst x [int64(int64(c)>>uint64(d))]) 1102 for { 1103 d := v.AuxInt 1104 _ = v.Args[1] 1105 x := v.Args[0] 1106 v_1 := v.Args[1] 1107 if v_1.Op != OpARM64MOVDconst { 1108 break 1109 } 1110 c := v_1.AuxInt 1111 v.reset(OpARM64ADDconst) 1112 v.AuxInt = int64(int64(c) >> uint64(d)) 1113 v.AddArg(x) 1114 return true 1115 } 1116 return false 1117 } 1118 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 1119 b := v.Block 1120 _ = b 1121 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 1122 // cond: 1123 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 1124 for { 1125 d := v.AuxInt 1126 _ = v.Args[1] 1127 v_0 := v.Args[0] 1128 if v_0.Op != OpARM64MOVDconst { 1129 break 1130 } 1131 c := v_0.AuxInt 1132 x := v.Args[1] 1133 v.reset(OpARM64ADDconst) 1134 v.AuxInt = c 1135 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1136 v0.AuxInt = d 1137 v0.AddArg(x) 1138 v.AddArg(v0) 1139 return true 1140 } 1141 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 1142 // cond: 1143 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 1144 for { 1145 d := v.AuxInt 1146 _ = v.Args[1] 1147 x := v.Args[0] 1148 v_1 := v.Args[1] 1149 if v_1.Op != OpARM64MOVDconst { 1150 break 1151 } 1152 c := 
v_1.AuxInt 1153 v.reset(OpARM64ADDconst) 1154 v.AuxInt = int64(uint64(c) >> uint64(d)) 1155 v.AddArg(x) 1156 return true 1157 } 1158 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 1159 // cond: 1160 // result: (RORconst [ c] x) 1161 for { 1162 c := v.AuxInt 1163 _ = v.Args[1] 1164 v_0 := v.Args[0] 1165 if v_0.Op != OpARM64SLLconst { 1166 break 1167 } 1168 if v_0.AuxInt != 64-c { 1169 break 1170 } 1171 x := v_0.Args[0] 1172 if x != v.Args[1] { 1173 break 1174 } 1175 v.reset(OpARM64RORconst) 1176 v.AuxInt = c 1177 v.AddArg(x) 1178 return true 1179 } 1180 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 1181 // cond: c < 32 && t.Size() == 4 1182 // result: (RORWconst [ c] x) 1183 for { 1184 t := v.Type 1185 c := v.AuxInt 1186 _ = v.Args[1] 1187 v_0 := v.Args[0] 1188 if v_0.Op != OpARM64SLLconst { 1189 break 1190 } 1191 if v_0.AuxInt != 32-c { 1192 break 1193 } 1194 x := v_0.Args[0] 1195 v_1 := v.Args[1] 1196 if v_1.Op != OpARM64MOVWUreg { 1197 break 1198 } 1199 if x != v_1.Args[0] { 1200 break 1201 } 1202 if !(c < 32 && t.Size() == 4) { 1203 break 1204 } 1205 v.reset(OpARM64RORWconst) 1206 v.AuxInt = c 1207 v.AddArg(x) 1208 return true 1209 } 1210 return false 1211 } 1212 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 1213 // match: (AND x (MOVDconst [c])) 1214 // cond: 1215 // result: (ANDconst [c] x) 1216 for { 1217 _ = v.Args[1] 1218 x := v.Args[0] 1219 v_1 := v.Args[1] 1220 if v_1.Op != OpARM64MOVDconst { 1221 break 1222 } 1223 c := v_1.AuxInt 1224 v.reset(OpARM64ANDconst) 1225 v.AuxInt = c 1226 v.AddArg(x) 1227 return true 1228 } 1229 // match: (AND (MOVDconst [c]) x) 1230 // cond: 1231 // result: (ANDconst [c] x) 1232 for { 1233 _ = v.Args[1] 1234 v_0 := v.Args[0] 1235 if v_0.Op != OpARM64MOVDconst { 1236 break 1237 } 1238 c := v_0.AuxInt 1239 x := v.Args[1] 1240 v.reset(OpARM64ANDconst) 1241 v.AuxInt = c 1242 v.AddArg(x) 1243 return true 1244 } 1245 // match: (AND x x) 1246 // cond: 1247 // result: x 1248 for { 1249 _ = v.Args[1] 1250 x 
:= v.Args[0] 1251 if x != v.Args[1] { 1252 break 1253 } 1254 v.reset(OpCopy) 1255 v.Type = x.Type 1256 v.AddArg(x) 1257 return true 1258 } 1259 // match: (AND x (MVN y)) 1260 // cond: 1261 // result: (BIC x y) 1262 for { 1263 _ = v.Args[1] 1264 x := v.Args[0] 1265 v_1 := v.Args[1] 1266 if v_1.Op != OpARM64MVN { 1267 break 1268 } 1269 y := v_1.Args[0] 1270 v.reset(OpARM64BIC) 1271 v.AddArg(x) 1272 v.AddArg(y) 1273 return true 1274 } 1275 // match: (AND (MVN y) x) 1276 // cond: 1277 // result: (BIC x y) 1278 for { 1279 _ = v.Args[1] 1280 v_0 := v.Args[0] 1281 if v_0.Op != OpARM64MVN { 1282 break 1283 } 1284 y := v_0.Args[0] 1285 x := v.Args[1] 1286 v.reset(OpARM64BIC) 1287 v.AddArg(x) 1288 v.AddArg(y) 1289 return true 1290 } 1291 // match: (AND x (SLLconst [c] y)) 1292 // cond: 1293 // result: (ANDshiftLL x y [c]) 1294 for { 1295 _ = v.Args[1] 1296 x := v.Args[0] 1297 v_1 := v.Args[1] 1298 if v_1.Op != OpARM64SLLconst { 1299 break 1300 } 1301 c := v_1.AuxInt 1302 y := v_1.Args[0] 1303 v.reset(OpARM64ANDshiftLL) 1304 v.AuxInt = c 1305 v.AddArg(x) 1306 v.AddArg(y) 1307 return true 1308 } 1309 // match: (AND (SLLconst [c] y) x) 1310 // cond: 1311 // result: (ANDshiftLL x y [c]) 1312 for { 1313 _ = v.Args[1] 1314 v_0 := v.Args[0] 1315 if v_0.Op != OpARM64SLLconst { 1316 break 1317 } 1318 c := v_0.AuxInt 1319 y := v_0.Args[0] 1320 x := v.Args[1] 1321 v.reset(OpARM64ANDshiftLL) 1322 v.AuxInt = c 1323 v.AddArg(x) 1324 v.AddArg(y) 1325 return true 1326 } 1327 // match: (AND x (SRLconst [c] y)) 1328 // cond: 1329 // result: (ANDshiftRL x y [c]) 1330 for { 1331 _ = v.Args[1] 1332 x := v.Args[0] 1333 v_1 := v.Args[1] 1334 if v_1.Op != OpARM64SRLconst { 1335 break 1336 } 1337 c := v_1.AuxInt 1338 y := v_1.Args[0] 1339 v.reset(OpARM64ANDshiftRL) 1340 v.AuxInt = c 1341 v.AddArg(x) 1342 v.AddArg(y) 1343 return true 1344 } 1345 // match: (AND (SRLconst [c] y) x) 1346 // cond: 1347 // result: (ANDshiftRL x y [c]) 1348 for { 1349 _ = v.Args[1] 1350 v_0 := v.Args[0] 1351 if v_0.Op != 
OpARM64SRLconst { 1352 break 1353 } 1354 c := v_0.AuxInt 1355 y := v_0.Args[0] 1356 x := v.Args[1] 1357 v.reset(OpARM64ANDshiftRL) 1358 v.AuxInt = c 1359 v.AddArg(x) 1360 v.AddArg(y) 1361 return true 1362 } 1363 // match: (AND x (SRAconst [c] y)) 1364 // cond: 1365 // result: (ANDshiftRA x y [c]) 1366 for { 1367 _ = v.Args[1] 1368 x := v.Args[0] 1369 v_1 := v.Args[1] 1370 if v_1.Op != OpARM64SRAconst { 1371 break 1372 } 1373 c := v_1.AuxInt 1374 y := v_1.Args[0] 1375 v.reset(OpARM64ANDshiftRA) 1376 v.AuxInt = c 1377 v.AddArg(x) 1378 v.AddArg(y) 1379 return true 1380 } 1381 return false 1382 } 1383 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 1384 // match: (AND (SRAconst [c] y) x) 1385 // cond: 1386 // result: (ANDshiftRA x y [c]) 1387 for { 1388 _ = v.Args[1] 1389 v_0 := v.Args[0] 1390 if v_0.Op != OpARM64SRAconst { 1391 break 1392 } 1393 c := v_0.AuxInt 1394 y := v_0.Args[0] 1395 x := v.Args[1] 1396 v.reset(OpARM64ANDshiftRA) 1397 v.AuxInt = c 1398 v.AddArg(x) 1399 v.AddArg(y) 1400 return true 1401 } 1402 return false 1403 } 1404 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 1405 // match: (ANDconst [0] _) 1406 // cond: 1407 // result: (MOVDconst [0]) 1408 for { 1409 if v.AuxInt != 0 { 1410 break 1411 } 1412 v.reset(OpARM64MOVDconst) 1413 v.AuxInt = 0 1414 return true 1415 } 1416 // match: (ANDconst [-1] x) 1417 // cond: 1418 // result: x 1419 for { 1420 if v.AuxInt != -1 { 1421 break 1422 } 1423 x := v.Args[0] 1424 v.reset(OpCopy) 1425 v.Type = x.Type 1426 v.AddArg(x) 1427 return true 1428 } 1429 // match: (ANDconst [c] (MOVDconst [d])) 1430 // cond: 1431 // result: (MOVDconst [c&d]) 1432 for { 1433 c := v.AuxInt 1434 v_0 := v.Args[0] 1435 if v_0.Op != OpARM64MOVDconst { 1436 break 1437 } 1438 d := v_0.AuxInt 1439 v.reset(OpARM64MOVDconst) 1440 v.AuxInt = c & d 1441 return true 1442 } 1443 // match: (ANDconst [c] (ANDconst [d] x)) 1444 // cond: 1445 // result: (ANDconst [c&d] x) 1446 for { 1447 c := v.AuxInt 1448 v_0 := v.Args[0] 1449 if 
v_0.Op != OpARM64ANDconst { 1450 break 1451 } 1452 d := v_0.AuxInt 1453 x := v_0.Args[0] 1454 v.reset(OpARM64ANDconst) 1455 v.AuxInt = c & d 1456 v.AddArg(x) 1457 return true 1458 } 1459 return false 1460 } 1461 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 1462 b := v.Block 1463 _ = b 1464 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 1465 // cond: 1466 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 1467 for { 1468 d := v.AuxInt 1469 _ = v.Args[1] 1470 v_0 := v.Args[0] 1471 if v_0.Op != OpARM64MOVDconst { 1472 break 1473 } 1474 c := v_0.AuxInt 1475 x := v.Args[1] 1476 v.reset(OpARM64ANDconst) 1477 v.AuxInt = c 1478 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 1479 v0.AuxInt = d 1480 v0.AddArg(x) 1481 v.AddArg(v0) 1482 return true 1483 } 1484 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 1485 // cond: 1486 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 1487 for { 1488 d := v.AuxInt 1489 _ = v.Args[1] 1490 x := v.Args[0] 1491 v_1 := v.Args[1] 1492 if v_1.Op != OpARM64MOVDconst { 1493 break 1494 } 1495 c := v_1.AuxInt 1496 v.reset(OpARM64ANDconst) 1497 v.AuxInt = int64(uint64(c) << uint64(d)) 1498 v.AddArg(x) 1499 return true 1500 } 1501 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 1502 // cond: c==d 1503 // result: y 1504 for { 1505 d := v.AuxInt 1506 _ = v.Args[1] 1507 x := v.Args[0] 1508 y := v.Args[1] 1509 if y.Op != OpARM64SLLconst { 1510 break 1511 } 1512 c := y.AuxInt 1513 if x != y.Args[0] { 1514 break 1515 } 1516 if !(c == d) { 1517 break 1518 } 1519 v.reset(OpCopy) 1520 v.Type = y.Type 1521 v.AddArg(y) 1522 return true 1523 } 1524 return false 1525 } 1526 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool { 1527 b := v.Block 1528 _ = b 1529 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 1530 // cond: 1531 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 1532 for { 1533 d := v.AuxInt 1534 _ = v.Args[1] 1535 v_0 := v.Args[0] 1536 if v_0.Op != OpARM64MOVDconst { 1537 break 1538 } 1539 c := v_0.AuxInt 1540 x := v.Args[1] 
1541 v.reset(OpARM64ANDconst) 1542 v.AuxInt = c 1543 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 1544 v0.AuxInt = d 1545 v0.AddArg(x) 1546 v.AddArg(v0) 1547 return true 1548 } 1549 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 1550 // cond: 1551 // result: (ANDconst x [int64(int64(c)>>uint64(d))]) 1552 for { 1553 d := v.AuxInt 1554 _ = v.Args[1] 1555 x := v.Args[0] 1556 v_1 := v.Args[1] 1557 if v_1.Op != OpARM64MOVDconst { 1558 break 1559 } 1560 c := v_1.AuxInt 1561 v.reset(OpARM64ANDconst) 1562 v.AuxInt = int64(int64(c) >> uint64(d)) 1563 v.AddArg(x) 1564 return true 1565 } 1566 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 1567 // cond: c==d 1568 // result: y 1569 for { 1570 d := v.AuxInt 1571 _ = v.Args[1] 1572 x := v.Args[0] 1573 y := v.Args[1] 1574 if y.Op != OpARM64SRAconst { 1575 break 1576 } 1577 c := y.AuxInt 1578 if x != y.Args[0] { 1579 break 1580 } 1581 if !(c == d) { 1582 break 1583 } 1584 v.reset(OpCopy) 1585 v.Type = y.Type 1586 v.AddArg(y) 1587 return true 1588 } 1589 return false 1590 } 1591 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool { 1592 b := v.Block 1593 _ = b 1594 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 1595 // cond: 1596 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 1597 for { 1598 d := v.AuxInt 1599 _ = v.Args[1] 1600 v_0 := v.Args[0] 1601 if v_0.Op != OpARM64MOVDconst { 1602 break 1603 } 1604 c := v_0.AuxInt 1605 x := v.Args[1] 1606 v.reset(OpARM64ANDconst) 1607 v.AuxInt = c 1608 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 1609 v0.AuxInt = d 1610 v0.AddArg(x) 1611 v.AddArg(v0) 1612 return true 1613 } 1614 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 1615 // cond: 1616 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 1617 for { 1618 d := v.AuxInt 1619 _ = v.Args[1] 1620 x := v.Args[0] 1621 v_1 := v.Args[1] 1622 if v_1.Op != OpARM64MOVDconst { 1623 break 1624 } 1625 c := v_1.AuxInt 1626 v.reset(OpARM64ANDconst) 1627 v.AuxInt = int64(uint64(c) >> uint64(d)) 1628 v.AddArg(x) 1629 return true 1630 } 
1631 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 1632 // cond: c==d 1633 // result: y 1634 for { 1635 d := v.AuxInt 1636 _ = v.Args[1] 1637 x := v.Args[0] 1638 y := v.Args[1] 1639 if y.Op != OpARM64SRLconst { 1640 break 1641 } 1642 c := y.AuxInt 1643 if x != y.Args[0] { 1644 break 1645 } 1646 if !(c == d) { 1647 break 1648 } 1649 v.reset(OpCopy) 1650 v.Type = y.Type 1651 v.AddArg(y) 1652 return true 1653 } 1654 return false 1655 } 1656 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 1657 // match: (BIC x (MOVDconst [c])) 1658 // cond: 1659 // result: (BICconst [c] x) 1660 for { 1661 _ = v.Args[1] 1662 x := v.Args[0] 1663 v_1 := v.Args[1] 1664 if v_1.Op != OpARM64MOVDconst { 1665 break 1666 } 1667 c := v_1.AuxInt 1668 v.reset(OpARM64BICconst) 1669 v.AuxInt = c 1670 v.AddArg(x) 1671 return true 1672 } 1673 // match: (BIC x x) 1674 // cond: 1675 // result: (MOVDconst [0]) 1676 for { 1677 _ = v.Args[1] 1678 x := v.Args[0] 1679 if x != v.Args[1] { 1680 break 1681 } 1682 v.reset(OpARM64MOVDconst) 1683 v.AuxInt = 0 1684 return true 1685 } 1686 // match: (BIC x (SLLconst [c] y)) 1687 // cond: 1688 // result: (BICshiftLL x y [c]) 1689 for { 1690 _ = v.Args[1] 1691 x := v.Args[0] 1692 v_1 := v.Args[1] 1693 if v_1.Op != OpARM64SLLconst { 1694 break 1695 } 1696 c := v_1.AuxInt 1697 y := v_1.Args[0] 1698 v.reset(OpARM64BICshiftLL) 1699 v.AuxInt = c 1700 v.AddArg(x) 1701 v.AddArg(y) 1702 return true 1703 } 1704 // match: (BIC x (SRLconst [c] y)) 1705 // cond: 1706 // result: (BICshiftRL x y [c]) 1707 for { 1708 _ = v.Args[1] 1709 x := v.Args[0] 1710 v_1 := v.Args[1] 1711 if v_1.Op != OpARM64SRLconst { 1712 break 1713 } 1714 c := v_1.AuxInt 1715 y := v_1.Args[0] 1716 v.reset(OpARM64BICshiftRL) 1717 v.AuxInt = c 1718 v.AddArg(x) 1719 v.AddArg(y) 1720 return true 1721 } 1722 // match: (BIC x (SRAconst [c] y)) 1723 // cond: 1724 // result: (BICshiftRA x y [c]) 1725 for { 1726 _ = v.Args[1] 1727 x := v.Args[0] 1728 v_1 := v.Args[1] 1729 if v_1.Op != OpARM64SRAconst { 1730 
break 1731 } 1732 c := v_1.AuxInt 1733 y := v_1.Args[0] 1734 v.reset(OpARM64BICshiftRA) 1735 v.AuxInt = c 1736 v.AddArg(x) 1737 v.AddArg(y) 1738 return true 1739 } 1740 return false 1741 } 1742 func rewriteValueARM64_OpARM64BICconst_0(v *Value) bool { 1743 // match: (BICconst [0] x) 1744 // cond: 1745 // result: x 1746 for { 1747 if v.AuxInt != 0 { 1748 break 1749 } 1750 x := v.Args[0] 1751 v.reset(OpCopy) 1752 v.Type = x.Type 1753 v.AddArg(x) 1754 return true 1755 } 1756 // match: (BICconst [-1] _) 1757 // cond: 1758 // result: (MOVDconst [0]) 1759 for { 1760 if v.AuxInt != -1 { 1761 break 1762 } 1763 v.reset(OpARM64MOVDconst) 1764 v.AuxInt = 0 1765 return true 1766 } 1767 // match: (BICconst [c] (MOVDconst [d])) 1768 // cond: 1769 // result: (MOVDconst [d&^c]) 1770 for { 1771 c := v.AuxInt 1772 v_0 := v.Args[0] 1773 if v_0.Op != OpARM64MOVDconst { 1774 break 1775 } 1776 d := v_0.AuxInt 1777 v.reset(OpARM64MOVDconst) 1778 v.AuxInt = d &^ c 1779 return true 1780 } 1781 return false 1782 } 1783 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 1784 // match: (BICshiftLL x (MOVDconst [c]) [d]) 1785 // cond: 1786 // result: (BICconst x [int64(uint64(c)<<uint64(d))]) 1787 for { 1788 d := v.AuxInt 1789 _ = v.Args[1] 1790 x := v.Args[0] 1791 v_1 := v.Args[1] 1792 if v_1.Op != OpARM64MOVDconst { 1793 break 1794 } 1795 c := v_1.AuxInt 1796 v.reset(OpARM64BICconst) 1797 v.AuxInt = int64(uint64(c) << uint64(d)) 1798 v.AddArg(x) 1799 return true 1800 } 1801 // match: (BICshiftLL x (SLLconst x [c]) [d]) 1802 // cond: c==d 1803 // result: (MOVDconst [0]) 1804 for { 1805 d := v.AuxInt 1806 _ = v.Args[1] 1807 x := v.Args[0] 1808 v_1 := v.Args[1] 1809 if v_1.Op != OpARM64SLLconst { 1810 break 1811 } 1812 c := v_1.AuxInt 1813 if x != v_1.Args[0] { 1814 break 1815 } 1816 if !(c == d) { 1817 break 1818 } 1819 v.reset(OpARM64MOVDconst) 1820 v.AuxInt = 0 1821 return true 1822 } 1823 return false 1824 } 1825 func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool { 1826 // 
match: (BICshiftRA x (MOVDconst [c]) [d]) 1827 // cond: 1828 // result: (BICconst x [int64(int64(c)>>uint64(d))]) 1829 for { 1830 d := v.AuxInt 1831 _ = v.Args[1] 1832 x := v.Args[0] 1833 v_1 := v.Args[1] 1834 if v_1.Op != OpARM64MOVDconst { 1835 break 1836 } 1837 c := v_1.AuxInt 1838 v.reset(OpARM64BICconst) 1839 v.AuxInt = int64(int64(c) >> uint64(d)) 1840 v.AddArg(x) 1841 return true 1842 } 1843 // match: (BICshiftRA x (SRAconst x [c]) [d]) 1844 // cond: c==d 1845 // result: (MOVDconst [0]) 1846 for { 1847 d := v.AuxInt 1848 _ = v.Args[1] 1849 x := v.Args[0] 1850 v_1 := v.Args[1] 1851 if v_1.Op != OpARM64SRAconst { 1852 break 1853 } 1854 c := v_1.AuxInt 1855 if x != v_1.Args[0] { 1856 break 1857 } 1858 if !(c == d) { 1859 break 1860 } 1861 v.reset(OpARM64MOVDconst) 1862 v.AuxInt = 0 1863 return true 1864 } 1865 return false 1866 } 1867 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool { 1868 // match: (BICshiftRL x (MOVDconst [c]) [d]) 1869 // cond: 1870 // result: (BICconst x [int64(uint64(c)>>uint64(d))]) 1871 for { 1872 d := v.AuxInt 1873 _ = v.Args[1] 1874 x := v.Args[0] 1875 v_1 := v.Args[1] 1876 if v_1.Op != OpARM64MOVDconst { 1877 break 1878 } 1879 c := v_1.AuxInt 1880 v.reset(OpARM64BICconst) 1881 v.AuxInt = int64(uint64(c) >> uint64(d)) 1882 v.AddArg(x) 1883 return true 1884 } 1885 // match: (BICshiftRL x (SRLconst x [c]) [d]) 1886 // cond: c==d 1887 // result: (MOVDconst [0]) 1888 for { 1889 d := v.AuxInt 1890 _ = v.Args[1] 1891 x := v.Args[0] 1892 v_1 := v.Args[1] 1893 if v_1.Op != OpARM64SRLconst { 1894 break 1895 } 1896 c := v_1.AuxInt 1897 if x != v_1.Args[0] { 1898 break 1899 } 1900 if !(c == d) { 1901 break 1902 } 1903 v.reset(OpARM64MOVDconst) 1904 v.AuxInt = 0 1905 return true 1906 } 1907 return false 1908 } 1909 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 1910 b := v.Block 1911 _ = b 1912 // match: (CMP x (MOVDconst [c])) 1913 // cond: 1914 // result: (CMPconst [c] x) 1915 for { 1916 _ = v.Args[1] 1917 x := v.Args[0] 1918 v_1 
:= v.Args[1] 1919 if v_1.Op != OpARM64MOVDconst { 1920 break 1921 } 1922 c := v_1.AuxInt 1923 v.reset(OpARM64CMPconst) 1924 v.AuxInt = c 1925 v.AddArg(x) 1926 return true 1927 } 1928 // match: (CMP (MOVDconst [c]) x) 1929 // cond: 1930 // result: (InvertFlags (CMPconst [c] x)) 1931 for { 1932 _ = v.Args[1] 1933 v_0 := v.Args[0] 1934 if v_0.Op != OpARM64MOVDconst { 1935 break 1936 } 1937 c := v_0.AuxInt 1938 x := v.Args[1] 1939 v.reset(OpARM64InvertFlags) 1940 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 1941 v0.AuxInt = c 1942 v0.AddArg(x) 1943 v.AddArg(v0) 1944 return true 1945 } 1946 // match: (CMP x (SLLconst [c] y)) 1947 // cond: 1948 // result: (CMPshiftLL x y [c]) 1949 for { 1950 _ = v.Args[1] 1951 x := v.Args[0] 1952 v_1 := v.Args[1] 1953 if v_1.Op != OpARM64SLLconst { 1954 break 1955 } 1956 c := v_1.AuxInt 1957 y := v_1.Args[0] 1958 v.reset(OpARM64CMPshiftLL) 1959 v.AuxInt = c 1960 v.AddArg(x) 1961 v.AddArg(y) 1962 return true 1963 } 1964 // match: (CMP (SLLconst [c] y) x) 1965 // cond: 1966 // result: (InvertFlags (CMPshiftLL x y [c])) 1967 for { 1968 _ = v.Args[1] 1969 v_0 := v.Args[0] 1970 if v_0.Op != OpARM64SLLconst { 1971 break 1972 } 1973 c := v_0.AuxInt 1974 y := v_0.Args[0] 1975 x := v.Args[1] 1976 v.reset(OpARM64InvertFlags) 1977 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 1978 v0.AuxInt = c 1979 v0.AddArg(x) 1980 v0.AddArg(y) 1981 v.AddArg(v0) 1982 return true 1983 } 1984 // match: (CMP x (SRLconst [c] y)) 1985 // cond: 1986 // result: (CMPshiftRL x y [c]) 1987 for { 1988 _ = v.Args[1] 1989 x := v.Args[0] 1990 v_1 := v.Args[1] 1991 if v_1.Op != OpARM64SRLconst { 1992 break 1993 } 1994 c := v_1.AuxInt 1995 y := v_1.Args[0] 1996 v.reset(OpARM64CMPshiftRL) 1997 v.AuxInt = c 1998 v.AddArg(x) 1999 v.AddArg(y) 2000 return true 2001 } 2002 // match: (CMP (SRLconst [c] y) x) 2003 // cond: 2004 // result: (InvertFlags (CMPshiftRL x y [c])) 2005 for { 2006 _ = v.Args[1] 2007 v_0 := v.Args[0] 2008 if v_0.Op != OpARM64SRLconst 
{ 2009 break 2010 } 2011 c := v_0.AuxInt 2012 y := v_0.Args[0] 2013 x := v.Args[1] 2014 v.reset(OpARM64InvertFlags) 2015 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 2016 v0.AuxInt = c 2017 v0.AddArg(x) 2018 v0.AddArg(y) 2019 v.AddArg(v0) 2020 return true 2021 } 2022 // match: (CMP x (SRAconst [c] y)) 2023 // cond: 2024 // result: (CMPshiftRA x y [c]) 2025 for { 2026 _ = v.Args[1] 2027 x := v.Args[0] 2028 v_1 := v.Args[1] 2029 if v_1.Op != OpARM64SRAconst { 2030 break 2031 } 2032 c := v_1.AuxInt 2033 y := v_1.Args[0] 2034 v.reset(OpARM64CMPshiftRA) 2035 v.AuxInt = c 2036 v.AddArg(x) 2037 v.AddArg(y) 2038 return true 2039 } 2040 // match: (CMP (SRAconst [c] y) x) 2041 // cond: 2042 // result: (InvertFlags (CMPshiftRA x y [c])) 2043 for { 2044 _ = v.Args[1] 2045 v_0 := v.Args[0] 2046 if v_0.Op != OpARM64SRAconst { 2047 break 2048 } 2049 c := v_0.AuxInt 2050 y := v_0.Args[0] 2051 x := v.Args[1] 2052 v.reset(OpARM64InvertFlags) 2053 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 2054 v0.AuxInt = c 2055 v0.AddArg(x) 2056 v0.AddArg(y) 2057 v.AddArg(v0) 2058 return true 2059 } 2060 return false 2061 } 2062 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool { 2063 b := v.Block 2064 _ = b 2065 // match: (CMPW x (MOVDconst [c])) 2066 // cond: 2067 // result: (CMPWconst [int64(int32(c))] x) 2068 for { 2069 _ = v.Args[1] 2070 x := v.Args[0] 2071 v_1 := v.Args[1] 2072 if v_1.Op != OpARM64MOVDconst { 2073 break 2074 } 2075 c := v_1.AuxInt 2076 v.reset(OpARM64CMPWconst) 2077 v.AuxInt = int64(int32(c)) 2078 v.AddArg(x) 2079 return true 2080 } 2081 // match: (CMPW (MOVDconst [c]) x) 2082 // cond: 2083 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 2084 for { 2085 _ = v.Args[1] 2086 v_0 := v.Args[0] 2087 if v_0.Op != OpARM64MOVDconst { 2088 break 2089 } 2090 c := v_0.AuxInt 2091 x := v.Args[1] 2092 v.reset(OpARM64InvertFlags) 2093 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 2094 v0.AuxInt = int64(int32(c)) 2095 v0.AddArg(x) 
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64CMPWconst_0 folds a 32-bit compare-with-constant
// into a concrete flags value when the outcome is statically decidable:
// either both operands are constants, or the non-constant operand is a
// zero-extended narrow value whose maximum possible value is below the
// constant (unsigned-less-than is then guaranteed).
func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool {
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)<uint32(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)>uint32(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)<uint32(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)>uint32(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64CMPconst_0 is the 64-bit analogue of CMPWconst
// folding: compare-with-constant is replaced by a concrete flags value when
// the result is statically known.
func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool {
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
2267 y := v.AuxInt 2268 v_0 := v.Args[0] 2269 if v_0.Op != OpARM64MOVDconst { 2270 break 2271 } 2272 x := v_0.AuxInt 2273 if !(int64(x) > int64(y) && uint64(x) < uint64(y)) { 2274 break 2275 } 2276 v.reset(OpARM64FlagGT_ULT) 2277 return true 2278 } 2279 // match: (CMPconst (MOVDconst [x]) [y]) 2280 // cond: int64(x)>int64(y) && uint64(x)>uint64(y) 2281 // result: (FlagGT_UGT) 2282 for { 2283 y := v.AuxInt 2284 v_0 := v.Args[0] 2285 if v_0.Op != OpARM64MOVDconst { 2286 break 2287 } 2288 x := v_0.AuxInt 2289 if !(int64(x) > int64(y) && uint64(x) > uint64(y)) { 2290 break 2291 } 2292 v.reset(OpARM64FlagGT_UGT) 2293 return true 2294 } 2295 // match: (CMPconst (MOVBUreg _) [c]) 2296 // cond: 0xff < c 2297 // result: (FlagLT_ULT) 2298 for { 2299 c := v.AuxInt 2300 v_0 := v.Args[0] 2301 if v_0.Op != OpARM64MOVBUreg { 2302 break 2303 } 2304 if !(0xff < c) { 2305 break 2306 } 2307 v.reset(OpARM64FlagLT_ULT) 2308 return true 2309 } 2310 // match: (CMPconst (MOVHUreg _) [c]) 2311 // cond: 0xffff < c 2312 // result: (FlagLT_ULT) 2313 for { 2314 c := v.AuxInt 2315 v_0 := v.Args[0] 2316 if v_0.Op != OpARM64MOVHUreg { 2317 break 2318 } 2319 if !(0xffff < c) { 2320 break 2321 } 2322 v.reset(OpARM64FlagLT_ULT) 2323 return true 2324 } 2325 // match: (CMPconst (MOVWUreg _) [c]) 2326 // cond: 0xffffffff < c 2327 // result: (FlagLT_ULT) 2328 for { 2329 c := v.AuxInt 2330 v_0 := v.Args[0] 2331 if v_0.Op != OpARM64MOVWUreg { 2332 break 2333 } 2334 if !(0xffffffff < c) { 2335 break 2336 } 2337 v.reset(OpARM64FlagLT_ULT) 2338 return true 2339 } 2340 // match: (CMPconst (ANDconst _ [m]) [n]) 2341 // cond: 0 <= m && m < n 2342 // result: (FlagLT_ULT) 2343 for { 2344 n := v.AuxInt 2345 v_0 := v.Args[0] 2346 if v_0.Op != OpARM64ANDconst { 2347 break 2348 } 2349 m := v_0.AuxInt 2350 if !(0 <= m && m < n) { 2351 break 2352 } 2353 v.reset(OpARM64FlagLT_ULT) 2354 return true 2355 } 2356 // match: (CMPconst (SRLconst _ [c]) [n]) 2357 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= 
uint64(n) 2358 // result: (FlagLT_ULT) 2359 for { 2360 n := v.AuxInt 2361 v_0 := v.Args[0] 2362 if v_0.Op != OpARM64SRLconst { 2363 break 2364 } 2365 c := v_0.AuxInt 2366 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 2367 break 2368 } 2369 v.reset(OpARM64FlagLT_ULT) 2370 return true 2371 } 2372 return false 2373 } 2374 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 2375 b := v.Block 2376 _ = b 2377 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 2378 // cond: 2379 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 2380 for { 2381 d := v.AuxInt 2382 _ = v.Args[1] 2383 v_0 := v.Args[0] 2384 if v_0.Op != OpARM64MOVDconst { 2385 break 2386 } 2387 c := v_0.AuxInt 2388 x := v.Args[1] 2389 v.reset(OpARM64InvertFlags) 2390 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2391 v0.AuxInt = c 2392 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2393 v1.AuxInt = d 2394 v1.AddArg(x) 2395 v0.AddArg(v1) 2396 v.AddArg(v0) 2397 return true 2398 } 2399 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 2400 // cond: 2401 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 2402 for { 2403 d := v.AuxInt 2404 _ = v.Args[1] 2405 x := v.Args[0] 2406 v_1 := v.Args[1] 2407 if v_1.Op != OpARM64MOVDconst { 2408 break 2409 } 2410 c := v_1.AuxInt 2411 v.reset(OpARM64CMPconst) 2412 v.AuxInt = int64(uint64(c) << uint64(d)) 2413 v.AddArg(x) 2414 return true 2415 } 2416 return false 2417 } 2418 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 2419 b := v.Block 2420 _ = b 2421 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 2422 // cond: 2423 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 2424 for { 2425 d := v.AuxInt 2426 _ = v.Args[1] 2427 v_0 := v.Args[0] 2428 if v_0.Op != OpARM64MOVDconst { 2429 break 2430 } 2431 c := v_0.AuxInt 2432 x := v.Args[1] 2433 v.reset(OpARM64InvertFlags) 2434 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2435 v0.AuxInt = c 2436 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, 
x.Type) 2437 v1.AuxInt = d 2438 v1.AddArg(x) 2439 v0.AddArg(v1) 2440 v.AddArg(v0) 2441 return true 2442 } 2443 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 2444 // cond: 2445 // result: (CMPconst x [int64(int64(c)>>uint64(d))]) 2446 for { 2447 d := v.AuxInt 2448 _ = v.Args[1] 2449 x := v.Args[0] 2450 v_1 := v.Args[1] 2451 if v_1.Op != OpARM64MOVDconst { 2452 break 2453 } 2454 c := v_1.AuxInt 2455 v.reset(OpARM64CMPconst) 2456 v.AuxInt = int64(int64(c) >> uint64(d)) 2457 v.AddArg(x) 2458 return true 2459 } 2460 return false 2461 } 2462 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 2463 b := v.Block 2464 _ = b 2465 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 2466 // cond: 2467 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 2468 for { 2469 d := v.AuxInt 2470 _ = v.Args[1] 2471 v_0 := v.Args[0] 2472 if v_0.Op != OpARM64MOVDconst { 2473 break 2474 } 2475 c := v_0.AuxInt 2476 x := v.Args[1] 2477 v.reset(OpARM64InvertFlags) 2478 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 2479 v0.AuxInt = c 2480 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2481 v1.AuxInt = d 2482 v1.AddArg(x) 2483 v0.AddArg(v1) 2484 v.AddArg(v0) 2485 return true 2486 } 2487 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 2488 // cond: 2489 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 2490 for { 2491 d := v.AuxInt 2492 _ = v.Args[1] 2493 x := v.Args[0] 2494 v_1 := v.Args[1] 2495 if v_1.Op != OpARM64MOVDconst { 2496 break 2497 } 2498 c := v_1.AuxInt 2499 v.reset(OpARM64CMPconst) 2500 v.AuxInt = int64(uint64(c) >> uint64(d)) 2501 v.AddArg(x) 2502 return true 2503 } 2504 return false 2505 } 2506 func rewriteValueARM64_OpARM64CSELULT_0(v *Value) bool { 2507 // match: (CSELULT x (MOVDconst [0]) flag) 2508 // cond: 2509 // result: (CSELULT0 x flag) 2510 for { 2511 _ = v.Args[2] 2512 x := v.Args[0] 2513 v_1 := v.Args[1] 2514 if v_1.Op != OpARM64MOVDconst { 2515 break 2516 } 2517 if v_1.AuxInt != 0 { 2518 break 2519 } 2520 flag := v.Args[2] 2521 
v.reset(OpARM64CSELULT0) 2522 v.AddArg(x) 2523 v.AddArg(flag) 2524 return true 2525 } 2526 // match: (CSELULT _ y (FlagEQ)) 2527 // cond: 2528 // result: y 2529 for { 2530 _ = v.Args[2] 2531 y := v.Args[1] 2532 v_2 := v.Args[2] 2533 if v_2.Op != OpARM64FlagEQ { 2534 break 2535 } 2536 v.reset(OpCopy) 2537 v.Type = y.Type 2538 v.AddArg(y) 2539 return true 2540 } 2541 // match: (CSELULT x _ (FlagLT_ULT)) 2542 // cond: 2543 // result: x 2544 for { 2545 _ = v.Args[2] 2546 x := v.Args[0] 2547 v_2 := v.Args[2] 2548 if v_2.Op != OpARM64FlagLT_ULT { 2549 break 2550 } 2551 v.reset(OpCopy) 2552 v.Type = x.Type 2553 v.AddArg(x) 2554 return true 2555 } 2556 // match: (CSELULT _ y (FlagLT_UGT)) 2557 // cond: 2558 // result: y 2559 for { 2560 _ = v.Args[2] 2561 y := v.Args[1] 2562 v_2 := v.Args[2] 2563 if v_2.Op != OpARM64FlagLT_UGT { 2564 break 2565 } 2566 v.reset(OpCopy) 2567 v.Type = y.Type 2568 v.AddArg(y) 2569 return true 2570 } 2571 // match: (CSELULT x _ (FlagGT_ULT)) 2572 // cond: 2573 // result: x 2574 for { 2575 _ = v.Args[2] 2576 x := v.Args[0] 2577 v_2 := v.Args[2] 2578 if v_2.Op != OpARM64FlagGT_ULT { 2579 break 2580 } 2581 v.reset(OpCopy) 2582 v.Type = x.Type 2583 v.AddArg(x) 2584 return true 2585 } 2586 // match: (CSELULT _ y (FlagGT_UGT)) 2587 // cond: 2588 // result: y 2589 for { 2590 _ = v.Args[2] 2591 y := v.Args[1] 2592 v_2 := v.Args[2] 2593 if v_2.Op != OpARM64FlagGT_UGT { 2594 break 2595 } 2596 v.reset(OpCopy) 2597 v.Type = y.Type 2598 v.AddArg(y) 2599 return true 2600 } 2601 return false 2602 } 2603 func rewriteValueARM64_OpARM64CSELULT0_0(v *Value) bool { 2604 // match: (CSELULT0 _ (FlagEQ)) 2605 // cond: 2606 // result: (MOVDconst [0]) 2607 for { 2608 _ = v.Args[1] 2609 v_1 := v.Args[1] 2610 if v_1.Op != OpARM64FlagEQ { 2611 break 2612 } 2613 v.reset(OpARM64MOVDconst) 2614 v.AuxInt = 0 2615 return true 2616 } 2617 // match: (CSELULT0 x (FlagLT_ULT)) 2618 // cond: 2619 // result: x 2620 for { 2621 _ = v.Args[1] 2622 x := v.Args[0] 2623 v_1 := v.Args[1] 
2624 if v_1.Op != OpARM64FlagLT_ULT { 2625 break 2626 } 2627 v.reset(OpCopy) 2628 v.Type = x.Type 2629 v.AddArg(x) 2630 return true 2631 } 2632 // match: (CSELULT0 _ (FlagLT_UGT)) 2633 // cond: 2634 // result: (MOVDconst [0]) 2635 for { 2636 _ = v.Args[1] 2637 v_1 := v.Args[1] 2638 if v_1.Op != OpARM64FlagLT_UGT { 2639 break 2640 } 2641 v.reset(OpARM64MOVDconst) 2642 v.AuxInt = 0 2643 return true 2644 } 2645 // match: (CSELULT0 x (FlagGT_ULT)) 2646 // cond: 2647 // result: x 2648 for { 2649 _ = v.Args[1] 2650 x := v.Args[0] 2651 v_1 := v.Args[1] 2652 if v_1.Op != OpARM64FlagGT_ULT { 2653 break 2654 } 2655 v.reset(OpCopy) 2656 v.Type = x.Type 2657 v.AddArg(x) 2658 return true 2659 } 2660 // match: (CSELULT0 _ (FlagGT_UGT)) 2661 // cond: 2662 // result: (MOVDconst [0]) 2663 for { 2664 _ = v.Args[1] 2665 v_1 := v.Args[1] 2666 if v_1.Op != OpARM64FlagGT_UGT { 2667 break 2668 } 2669 v.reset(OpARM64MOVDconst) 2670 v.AuxInt = 0 2671 return true 2672 } 2673 return false 2674 } 2675 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 2676 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 2677 // cond: 2678 // result: (MOVDconst [int64(c)/int64(d)]) 2679 for { 2680 _ = v.Args[1] 2681 v_0 := v.Args[0] 2682 if v_0.Op != OpARM64MOVDconst { 2683 break 2684 } 2685 c := v_0.AuxInt 2686 v_1 := v.Args[1] 2687 if v_1.Op != OpARM64MOVDconst { 2688 break 2689 } 2690 d := v_1.AuxInt 2691 v.reset(OpARM64MOVDconst) 2692 v.AuxInt = int64(c) / int64(d) 2693 return true 2694 } 2695 return false 2696 } 2697 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 2698 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 2699 // cond: 2700 // result: (MOVDconst [int64(int32(c)/int32(d))]) 2701 for { 2702 _ = v.Args[1] 2703 v_0 := v.Args[0] 2704 if v_0.Op != OpARM64MOVDconst { 2705 break 2706 } 2707 c := v_0.AuxInt 2708 v_1 := v.Args[1] 2709 if v_1.Op != OpARM64MOVDconst { 2710 break 2711 } 2712 d := v_1.AuxInt 2713 v.reset(OpARM64MOVDconst) 2714 v.AuxInt = int64(int32(c) / int32(d)) 2715 return true 
2716 } 2717 return false 2718 } 2719 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 2720 // match: (Equal (FlagEQ)) 2721 // cond: 2722 // result: (MOVDconst [1]) 2723 for { 2724 v_0 := v.Args[0] 2725 if v_0.Op != OpARM64FlagEQ { 2726 break 2727 } 2728 v.reset(OpARM64MOVDconst) 2729 v.AuxInt = 1 2730 return true 2731 } 2732 // match: (Equal (FlagLT_ULT)) 2733 // cond: 2734 // result: (MOVDconst [0]) 2735 for { 2736 v_0 := v.Args[0] 2737 if v_0.Op != OpARM64FlagLT_ULT { 2738 break 2739 } 2740 v.reset(OpARM64MOVDconst) 2741 v.AuxInt = 0 2742 return true 2743 } 2744 // match: (Equal (FlagLT_UGT)) 2745 // cond: 2746 // result: (MOVDconst [0]) 2747 for { 2748 v_0 := v.Args[0] 2749 if v_0.Op != OpARM64FlagLT_UGT { 2750 break 2751 } 2752 v.reset(OpARM64MOVDconst) 2753 v.AuxInt = 0 2754 return true 2755 } 2756 // match: (Equal (FlagGT_ULT)) 2757 // cond: 2758 // result: (MOVDconst [0]) 2759 for { 2760 v_0 := v.Args[0] 2761 if v_0.Op != OpARM64FlagGT_ULT { 2762 break 2763 } 2764 v.reset(OpARM64MOVDconst) 2765 v.AuxInt = 0 2766 return true 2767 } 2768 // match: (Equal (FlagGT_UGT)) 2769 // cond: 2770 // result: (MOVDconst [0]) 2771 for { 2772 v_0 := v.Args[0] 2773 if v_0.Op != OpARM64FlagGT_UGT { 2774 break 2775 } 2776 v.reset(OpARM64MOVDconst) 2777 v.AuxInt = 0 2778 return true 2779 } 2780 // match: (Equal (InvertFlags x)) 2781 // cond: 2782 // result: (Equal x) 2783 for { 2784 v_0 := v.Args[0] 2785 if v_0.Op != OpARM64InvertFlags { 2786 break 2787 } 2788 x := v_0.Args[0] 2789 v.reset(OpARM64Equal) 2790 v.AddArg(x) 2791 return true 2792 } 2793 return false 2794 } 2795 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 2796 b := v.Block 2797 _ = b 2798 config := b.Func.Config 2799 _ = config 2800 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 2801 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2802 // result: (FMOVDload [off1+off2] {sym} ptr mem) 2803 for { 2804 off1 := v.AuxInt 2805 sym := v.Aux 2806 _ = v.Args[1] 
2807 v_0 := v.Args[0] 2808 if v_0.Op != OpARM64ADDconst { 2809 break 2810 } 2811 off2 := v_0.AuxInt 2812 ptr := v_0.Args[0] 2813 mem := v.Args[1] 2814 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2815 break 2816 } 2817 v.reset(OpARM64FMOVDload) 2818 v.AuxInt = off1 + off2 2819 v.Aux = sym 2820 v.AddArg(ptr) 2821 v.AddArg(mem) 2822 return true 2823 } 2824 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2825 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2826 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2827 for { 2828 off1 := v.AuxInt 2829 sym1 := v.Aux 2830 _ = v.Args[1] 2831 v_0 := v.Args[0] 2832 if v_0.Op != OpARM64MOVDaddr { 2833 break 2834 } 2835 off2 := v_0.AuxInt 2836 sym2 := v_0.Aux 2837 ptr := v_0.Args[0] 2838 mem := v.Args[1] 2839 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2840 break 2841 } 2842 v.reset(OpARM64FMOVDload) 2843 v.AuxInt = off1 + off2 2844 v.Aux = mergeSym(sym1, sym2) 2845 v.AddArg(ptr) 2846 v.AddArg(mem) 2847 return true 2848 } 2849 return false 2850 } 2851 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 2852 b := v.Block 2853 _ = b 2854 config := b.Func.Config 2855 _ = config 2856 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2857 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2858 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 2859 for { 2860 off1 := v.AuxInt 2861 sym := v.Aux 2862 _ = v.Args[2] 2863 v_0 := v.Args[0] 2864 if v_0.Op != OpARM64ADDconst { 2865 break 2866 } 2867 off2 := v_0.AuxInt 2868 ptr := v_0.Args[0] 2869 val := v.Args[1] 2870 mem := v.Args[2] 2871 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2872 break 2873 } 2874 v.reset(OpARM64FMOVDstore) 2875 v.AuxInt = off1 + off2 2876 v.Aux = sym 2877 v.AddArg(ptr) 2878 v.AddArg(val) 2879 v.AddArg(mem) 
2880 return true 2881 } 2882 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2883 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2884 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2885 for { 2886 off1 := v.AuxInt 2887 sym1 := v.Aux 2888 _ = v.Args[2] 2889 v_0 := v.Args[0] 2890 if v_0.Op != OpARM64MOVDaddr { 2891 break 2892 } 2893 off2 := v_0.AuxInt 2894 sym2 := v_0.Aux 2895 ptr := v_0.Args[0] 2896 val := v.Args[1] 2897 mem := v.Args[2] 2898 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2899 break 2900 } 2901 v.reset(OpARM64FMOVDstore) 2902 v.AuxInt = off1 + off2 2903 v.Aux = mergeSym(sym1, sym2) 2904 v.AddArg(ptr) 2905 v.AddArg(val) 2906 v.AddArg(mem) 2907 return true 2908 } 2909 return false 2910 } 2911 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 2912 b := v.Block 2913 _ = b 2914 config := b.Func.Config 2915 _ = config 2916 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 2917 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2918 // result: (FMOVSload [off1+off2] {sym} ptr mem) 2919 for { 2920 off1 := v.AuxInt 2921 sym := v.Aux 2922 _ = v.Args[1] 2923 v_0 := v.Args[0] 2924 if v_0.Op != OpARM64ADDconst { 2925 break 2926 } 2927 off2 := v_0.AuxInt 2928 ptr := v_0.Args[0] 2929 mem := v.Args[1] 2930 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2931 break 2932 } 2933 v.reset(OpARM64FMOVSload) 2934 v.AuxInt = off1 + off2 2935 v.Aux = sym 2936 v.AddArg(ptr) 2937 v.AddArg(mem) 2938 return true 2939 } 2940 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 2941 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2942 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2943 for { 2944 off1 := v.AuxInt 2945 sym1 := v.Aux 2946 _ = v.Args[1] 2947 v_0 := 
v.Args[0] 2948 if v_0.Op != OpARM64MOVDaddr { 2949 break 2950 } 2951 off2 := v_0.AuxInt 2952 sym2 := v_0.Aux 2953 ptr := v_0.Args[0] 2954 mem := v.Args[1] 2955 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2956 break 2957 } 2958 v.reset(OpARM64FMOVSload) 2959 v.AuxInt = off1 + off2 2960 v.Aux = mergeSym(sym1, sym2) 2961 v.AddArg(ptr) 2962 v.AddArg(mem) 2963 return true 2964 } 2965 return false 2966 } 2967 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 2968 b := v.Block 2969 _ = b 2970 config := b.Func.Config 2971 _ = config 2972 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 2973 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 2974 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 2975 for { 2976 off1 := v.AuxInt 2977 sym := v.Aux 2978 _ = v.Args[2] 2979 v_0 := v.Args[0] 2980 if v_0.Op != OpARM64ADDconst { 2981 break 2982 } 2983 off2 := v_0.AuxInt 2984 ptr := v_0.Args[0] 2985 val := v.Args[1] 2986 mem := v.Args[2] 2987 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 2988 break 2989 } 2990 v.reset(OpARM64FMOVSstore) 2991 v.AuxInt = off1 + off2 2992 v.Aux = sym 2993 v.AddArg(ptr) 2994 v.AddArg(val) 2995 v.AddArg(mem) 2996 return true 2997 } 2998 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 2999 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3000 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3001 for { 3002 off1 := v.AuxInt 3003 sym1 := v.Aux 3004 _ = v.Args[2] 3005 v_0 := v.Args[0] 3006 if v_0.Op != OpARM64MOVDaddr { 3007 break 3008 } 3009 off2 := v_0.AuxInt 3010 sym2 := v_0.Aux 3011 ptr := v_0.Args[0] 3012 val := v.Args[1] 3013 mem := v.Args[2] 3014 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3015 break 3016 } 3017 v.reset(OpARM64FMOVSstore) 3018 v.AuxInt = 
off1 + off2 3019 v.Aux = mergeSym(sym1, sym2) 3020 v.AddArg(ptr) 3021 v.AddArg(val) 3022 v.AddArg(mem) 3023 return true 3024 } 3025 return false 3026 } 3027 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 3028 // match: (GreaterEqual (FlagEQ)) 3029 // cond: 3030 // result: (MOVDconst [1]) 3031 for { 3032 v_0 := v.Args[0] 3033 if v_0.Op != OpARM64FlagEQ { 3034 break 3035 } 3036 v.reset(OpARM64MOVDconst) 3037 v.AuxInt = 1 3038 return true 3039 } 3040 // match: (GreaterEqual (FlagLT_ULT)) 3041 // cond: 3042 // result: (MOVDconst [0]) 3043 for { 3044 v_0 := v.Args[0] 3045 if v_0.Op != OpARM64FlagLT_ULT { 3046 break 3047 } 3048 v.reset(OpARM64MOVDconst) 3049 v.AuxInt = 0 3050 return true 3051 } 3052 // match: (GreaterEqual (FlagLT_UGT)) 3053 // cond: 3054 // result: (MOVDconst [0]) 3055 for { 3056 v_0 := v.Args[0] 3057 if v_0.Op != OpARM64FlagLT_UGT { 3058 break 3059 } 3060 v.reset(OpARM64MOVDconst) 3061 v.AuxInt = 0 3062 return true 3063 } 3064 // match: (GreaterEqual (FlagGT_ULT)) 3065 // cond: 3066 // result: (MOVDconst [1]) 3067 for { 3068 v_0 := v.Args[0] 3069 if v_0.Op != OpARM64FlagGT_ULT { 3070 break 3071 } 3072 v.reset(OpARM64MOVDconst) 3073 v.AuxInt = 1 3074 return true 3075 } 3076 // match: (GreaterEqual (FlagGT_UGT)) 3077 // cond: 3078 // result: (MOVDconst [1]) 3079 for { 3080 v_0 := v.Args[0] 3081 if v_0.Op != OpARM64FlagGT_UGT { 3082 break 3083 } 3084 v.reset(OpARM64MOVDconst) 3085 v.AuxInt = 1 3086 return true 3087 } 3088 // match: (GreaterEqual (InvertFlags x)) 3089 // cond: 3090 // result: (LessEqual x) 3091 for { 3092 v_0 := v.Args[0] 3093 if v_0.Op != OpARM64InvertFlags { 3094 break 3095 } 3096 x := v_0.Args[0] 3097 v.reset(OpARM64LessEqual) 3098 v.AddArg(x) 3099 return true 3100 } 3101 return false 3102 } 3103 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 3104 // match: (GreaterEqualU (FlagEQ)) 3105 // cond: 3106 // result: (MOVDconst [1]) 3107 for { 3108 v_0 := v.Args[0] 3109 if v_0.Op != OpARM64FlagEQ { 3110 break 
3111 } 3112 v.reset(OpARM64MOVDconst) 3113 v.AuxInt = 1 3114 return true 3115 } 3116 // match: (GreaterEqualU (FlagLT_ULT)) 3117 // cond: 3118 // result: (MOVDconst [0]) 3119 for { 3120 v_0 := v.Args[0] 3121 if v_0.Op != OpARM64FlagLT_ULT { 3122 break 3123 } 3124 v.reset(OpARM64MOVDconst) 3125 v.AuxInt = 0 3126 return true 3127 } 3128 // match: (GreaterEqualU (FlagLT_UGT)) 3129 // cond: 3130 // result: (MOVDconst [1]) 3131 for { 3132 v_0 := v.Args[0] 3133 if v_0.Op != OpARM64FlagLT_UGT { 3134 break 3135 } 3136 v.reset(OpARM64MOVDconst) 3137 v.AuxInt = 1 3138 return true 3139 } 3140 // match: (GreaterEqualU (FlagGT_ULT)) 3141 // cond: 3142 // result: (MOVDconst [0]) 3143 for { 3144 v_0 := v.Args[0] 3145 if v_0.Op != OpARM64FlagGT_ULT { 3146 break 3147 } 3148 v.reset(OpARM64MOVDconst) 3149 v.AuxInt = 0 3150 return true 3151 } 3152 // match: (GreaterEqualU (FlagGT_UGT)) 3153 // cond: 3154 // result: (MOVDconst [1]) 3155 for { 3156 v_0 := v.Args[0] 3157 if v_0.Op != OpARM64FlagGT_UGT { 3158 break 3159 } 3160 v.reset(OpARM64MOVDconst) 3161 v.AuxInt = 1 3162 return true 3163 } 3164 // match: (GreaterEqualU (InvertFlags x)) 3165 // cond: 3166 // result: (LessEqualU x) 3167 for { 3168 v_0 := v.Args[0] 3169 if v_0.Op != OpARM64InvertFlags { 3170 break 3171 } 3172 x := v_0.Args[0] 3173 v.reset(OpARM64LessEqualU) 3174 v.AddArg(x) 3175 return true 3176 } 3177 return false 3178 } 3179 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 3180 // match: (GreaterThan (FlagEQ)) 3181 // cond: 3182 // result: (MOVDconst [0]) 3183 for { 3184 v_0 := v.Args[0] 3185 if v_0.Op != OpARM64FlagEQ { 3186 break 3187 } 3188 v.reset(OpARM64MOVDconst) 3189 v.AuxInt = 0 3190 return true 3191 } 3192 // match: (GreaterThan (FlagLT_ULT)) 3193 // cond: 3194 // result: (MOVDconst [0]) 3195 for { 3196 v_0 := v.Args[0] 3197 if v_0.Op != OpARM64FlagLT_ULT { 3198 break 3199 } 3200 v.reset(OpARM64MOVDconst) 3201 v.AuxInt = 0 3202 return true 3203 } 3204 // match: (GreaterThan (FlagLT_UGT)) 3205 // 
cond: 3206 // result: (MOVDconst [0]) 3207 for { 3208 v_0 := v.Args[0] 3209 if v_0.Op != OpARM64FlagLT_UGT { 3210 break 3211 } 3212 v.reset(OpARM64MOVDconst) 3213 v.AuxInt = 0 3214 return true 3215 } 3216 // match: (GreaterThan (FlagGT_ULT)) 3217 // cond: 3218 // result: (MOVDconst [1]) 3219 for { 3220 v_0 := v.Args[0] 3221 if v_0.Op != OpARM64FlagGT_ULT { 3222 break 3223 } 3224 v.reset(OpARM64MOVDconst) 3225 v.AuxInt = 1 3226 return true 3227 } 3228 // match: (GreaterThan (FlagGT_UGT)) 3229 // cond: 3230 // result: (MOVDconst [1]) 3231 for { 3232 v_0 := v.Args[0] 3233 if v_0.Op != OpARM64FlagGT_UGT { 3234 break 3235 } 3236 v.reset(OpARM64MOVDconst) 3237 v.AuxInt = 1 3238 return true 3239 } 3240 // match: (GreaterThan (InvertFlags x)) 3241 // cond: 3242 // result: (LessThan x) 3243 for { 3244 v_0 := v.Args[0] 3245 if v_0.Op != OpARM64InvertFlags { 3246 break 3247 } 3248 x := v_0.Args[0] 3249 v.reset(OpARM64LessThan) 3250 v.AddArg(x) 3251 return true 3252 } 3253 return false 3254 } 3255 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 3256 // match: (GreaterThanU (FlagEQ)) 3257 // cond: 3258 // result: (MOVDconst [0]) 3259 for { 3260 v_0 := v.Args[0] 3261 if v_0.Op != OpARM64FlagEQ { 3262 break 3263 } 3264 v.reset(OpARM64MOVDconst) 3265 v.AuxInt = 0 3266 return true 3267 } 3268 // match: (GreaterThanU (FlagLT_ULT)) 3269 // cond: 3270 // result: (MOVDconst [0]) 3271 for { 3272 v_0 := v.Args[0] 3273 if v_0.Op != OpARM64FlagLT_ULT { 3274 break 3275 } 3276 v.reset(OpARM64MOVDconst) 3277 v.AuxInt = 0 3278 return true 3279 } 3280 // match: (GreaterThanU (FlagLT_UGT)) 3281 // cond: 3282 // result: (MOVDconst [1]) 3283 for { 3284 v_0 := v.Args[0] 3285 if v_0.Op != OpARM64FlagLT_UGT { 3286 break 3287 } 3288 v.reset(OpARM64MOVDconst) 3289 v.AuxInt = 1 3290 return true 3291 } 3292 // match: (GreaterThanU (FlagGT_ULT)) 3293 // cond: 3294 // result: (MOVDconst [0]) 3295 for { 3296 v_0 := v.Args[0] 3297 if v_0.Op != OpARM64FlagGT_ULT { 3298 break 3299 } 3300 
v.reset(OpARM64MOVDconst) 3301 v.AuxInt = 0 3302 return true 3303 } 3304 // match: (GreaterThanU (FlagGT_UGT)) 3305 // cond: 3306 // result: (MOVDconst [1]) 3307 for { 3308 v_0 := v.Args[0] 3309 if v_0.Op != OpARM64FlagGT_UGT { 3310 break 3311 } 3312 v.reset(OpARM64MOVDconst) 3313 v.AuxInt = 1 3314 return true 3315 } 3316 // match: (GreaterThanU (InvertFlags x)) 3317 // cond: 3318 // result: (LessThanU x) 3319 for { 3320 v_0 := v.Args[0] 3321 if v_0.Op != OpARM64InvertFlags { 3322 break 3323 } 3324 x := v_0.Args[0] 3325 v.reset(OpARM64LessThanU) 3326 v.AddArg(x) 3327 return true 3328 } 3329 return false 3330 } 3331 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 3332 // match: (LessEqual (FlagEQ)) 3333 // cond: 3334 // result: (MOVDconst [1]) 3335 for { 3336 v_0 := v.Args[0] 3337 if v_0.Op != OpARM64FlagEQ { 3338 break 3339 } 3340 v.reset(OpARM64MOVDconst) 3341 v.AuxInt = 1 3342 return true 3343 } 3344 // match: (LessEqual (FlagLT_ULT)) 3345 // cond: 3346 // result: (MOVDconst [1]) 3347 for { 3348 v_0 := v.Args[0] 3349 if v_0.Op != OpARM64FlagLT_ULT { 3350 break 3351 } 3352 v.reset(OpARM64MOVDconst) 3353 v.AuxInt = 1 3354 return true 3355 } 3356 // match: (LessEqual (FlagLT_UGT)) 3357 // cond: 3358 // result: (MOVDconst [1]) 3359 for { 3360 v_0 := v.Args[0] 3361 if v_0.Op != OpARM64FlagLT_UGT { 3362 break 3363 } 3364 v.reset(OpARM64MOVDconst) 3365 v.AuxInt = 1 3366 return true 3367 } 3368 // match: (LessEqual (FlagGT_ULT)) 3369 // cond: 3370 // result: (MOVDconst [0]) 3371 for { 3372 v_0 := v.Args[0] 3373 if v_0.Op != OpARM64FlagGT_ULT { 3374 break 3375 } 3376 v.reset(OpARM64MOVDconst) 3377 v.AuxInt = 0 3378 return true 3379 } 3380 // match: (LessEqual (FlagGT_UGT)) 3381 // cond: 3382 // result: (MOVDconst [0]) 3383 for { 3384 v_0 := v.Args[0] 3385 if v_0.Op != OpARM64FlagGT_UGT { 3386 break 3387 } 3388 v.reset(OpARM64MOVDconst) 3389 v.AuxInt = 0 3390 return true 3391 } 3392 // match: (LessEqual (InvertFlags x)) 3393 // cond: 3394 // result: 
(GreaterEqual x) 3395 for { 3396 v_0 := v.Args[0] 3397 if v_0.Op != OpARM64InvertFlags { 3398 break 3399 } 3400 x := v_0.Args[0] 3401 v.reset(OpARM64GreaterEqual) 3402 v.AddArg(x) 3403 return true 3404 } 3405 return false 3406 } 3407 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool { 3408 // match: (LessEqualU (FlagEQ)) 3409 // cond: 3410 // result: (MOVDconst [1]) 3411 for { 3412 v_0 := v.Args[0] 3413 if v_0.Op != OpARM64FlagEQ { 3414 break 3415 } 3416 v.reset(OpARM64MOVDconst) 3417 v.AuxInt = 1 3418 return true 3419 } 3420 // match: (LessEqualU (FlagLT_ULT)) 3421 // cond: 3422 // result: (MOVDconst [1]) 3423 for { 3424 v_0 := v.Args[0] 3425 if v_0.Op != OpARM64FlagLT_ULT { 3426 break 3427 } 3428 v.reset(OpARM64MOVDconst) 3429 v.AuxInt = 1 3430 return true 3431 } 3432 // match: (LessEqualU (FlagLT_UGT)) 3433 // cond: 3434 // result: (MOVDconst [0]) 3435 for { 3436 v_0 := v.Args[0] 3437 if v_0.Op != OpARM64FlagLT_UGT { 3438 break 3439 } 3440 v.reset(OpARM64MOVDconst) 3441 v.AuxInt = 0 3442 return true 3443 } 3444 // match: (LessEqualU (FlagGT_ULT)) 3445 // cond: 3446 // result: (MOVDconst [1]) 3447 for { 3448 v_0 := v.Args[0] 3449 if v_0.Op != OpARM64FlagGT_ULT { 3450 break 3451 } 3452 v.reset(OpARM64MOVDconst) 3453 v.AuxInt = 1 3454 return true 3455 } 3456 // match: (LessEqualU (FlagGT_UGT)) 3457 // cond: 3458 // result: (MOVDconst [0]) 3459 for { 3460 v_0 := v.Args[0] 3461 if v_0.Op != OpARM64FlagGT_UGT { 3462 break 3463 } 3464 v.reset(OpARM64MOVDconst) 3465 v.AuxInt = 0 3466 return true 3467 } 3468 // match: (LessEqualU (InvertFlags x)) 3469 // cond: 3470 // result: (GreaterEqualU x) 3471 for { 3472 v_0 := v.Args[0] 3473 if v_0.Op != OpARM64InvertFlags { 3474 break 3475 } 3476 x := v_0.Args[0] 3477 v.reset(OpARM64GreaterEqualU) 3478 v.AddArg(x) 3479 return true 3480 } 3481 return false 3482 } 3483 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool { 3484 // match: (LessThan (FlagEQ)) 3485 // cond: 3486 // result: (MOVDconst [0]) 3487 for { 3488 v_0 
:= v.Args[0] 3489 if v_0.Op != OpARM64FlagEQ { 3490 break 3491 } 3492 v.reset(OpARM64MOVDconst) 3493 v.AuxInt = 0 3494 return true 3495 } 3496 // match: (LessThan (FlagLT_ULT)) 3497 // cond: 3498 // result: (MOVDconst [1]) 3499 for { 3500 v_0 := v.Args[0] 3501 if v_0.Op != OpARM64FlagLT_ULT { 3502 break 3503 } 3504 v.reset(OpARM64MOVDconst) 3505 v.AuxInt = 1 3506 return true 3507 } 3508 // match: (LessThan (FlagLT_UGT)) 3509 // cond: 3510 // result: (MOVDconst [1]) 3511 for { 3512 v_0 := v.Args[0] 3513 if v_0.Op != OpARM64FlagLT_UGT { 3514 break 3515 } 3516 v.reset(OpARM64MOVDconst) 3517 v.AuxInt = 1 3518 return true 3519 } 3520 // match: (LessThan (FlagGT_ULT)) 3521 // cond: 3522 // result: (MOVDconst [0]) 3523 for { 3524 v_0 := v.Args[0] 3525 if v_0.Op != OpARM64FlagGT_ULT { 3526 break 3527 } 3528 v.reset(OpARM64MOVDconst) 3529 v.AuxInt = 0 3530 return true 3531 } 3532 // match: (LessThan (FlagGT_UGT)) 3533 // cond: 3534 // result: (MOVDconst [0]) 3535 for { 3536 v_0 := v.Args[0] 3537 if v_0.Op != OpARM64FlagGT_UGT { 3538 break 3539 } 3540 v.reset(OpARM64MOVDconst) 3541 v.AuxInt = 0 3542 return true 3543 } 3544 // match: (LessThan (InvertFlags x)) 3545 // cond: 3546 // result: (GreaterThan x) 3547 for { 3548 v_0 := v.Args[0] 3549 if v_0.Op != OpARM64InvertFlags { 3550 break 3551 } 3552 x := v_0.Args[0] 3553 v.reset(OpARM64GreaterThan) 3554 v.AddArg(x) 3555 return true 3556 } 3557 return false 3558 } 3559 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool { 3560 // match: (LessThanU (FlagEQ)) 3561 // cond: 3562 // result: (MOVDconst [0]) 3563 for { 3564 v_0 := v.Args[0] 3565 if v_0.Op != OpARM64FlagEQ { 3566 break 3567 } 3568 v.reset(OpARM64MOVDconst) 3569 v.AuxInt = 0 3570 return true 3571 } 3572 // match: (LessThanU (FlagLT_ULT)) 3573 // cond: 3574 // result: (MOVDconst [1]) 3575 for { 3576 v_0 := v.Args[0] 3577 if v_0.Op != OpARM64FlagLT_ULT { 3578 break 3579 } 3580 v.reset(OpARM64MOVDconst) 3581 v.AuxInt = 1 3582 return true 3583 } 3584 // match: 
(LessThanU (FlagLT_UGT)) 3585 // cond: 3586 // result: (MOVDconst [0]) 3587 for { 3588 v_0 := v.Args[0] 3589 if v_0.Op != OpARM64FlagLT_UGT { 3590 break 3591 } 3592 v.reset(OpARM64MOVDconst) 3593 v.AuxInt = 0 3594 return true 3595 } 3596 // match: (LessThanU (FlagGT_ULT)) 3597 // cond: 3598 // result: (MOVDconst [1]) 3599 for { 3600 v_0 := v.Args[0] 3601 if v_0.Op != OpARM64FlagGT_ULT { 3602 break 3603 } 3604 v.reset(OpARM64MOVDconst) 3605 v.AuxInt = 1 3606 return true 3607 } 3608 // match: (LessThanU (FlagGT_UGT)) 3609 // cond: 3610 // result: (MOVDconst [0]) 3611 for { 3612 v_0 := v.Args[0] 3613 if v_0.Op != OpARM64FlagGT_UGT { 3614 break 3615 } 3616 v.reset(OpARM64MOVDconst) 3617 v.AuxInt = 0 3618 return true 3619 } 3620 // match: (LessThanU (InvertFlags x)) 3621 // cond: 3622 // result: (GreaterThanU x) 3623 for { 3624 v_0 := v.Args[0] 3625 if v_0.Op != OpARM64InvertFlags { 3626 break 3627 } 3628 x := v_0.Args[0] 3629 v.reset(OpARM64GreaterThanU) 3630 v.AddArg(x) 3631 return true 3632 } 3633 return false 3634 } 3635 func rewriteValueARM64_OpARM64MOD_0(v *Value) bool { 3636 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 3637 // cond: 3638 // result: (MOVDconst [int64(c)%int64(d)]) 3639 for { 3640 _ = v.Args[1] 3641 v_0 := v.Args[0] 3642 if v_0.Op != OpARM64MOVDconst { 3643 break 3644 } 3645 c := v_0.AuxInt 3646 v_1 := v.Args[1] 3647 if v_1.Op != OpARM64MOVDconst { 3648 break 3649 } 3650 d := v_1.AuxInt 3651 v.reset(OpARM64MOVDconst) 3652 v.AuxInt = int64(c) % int64(d) 3653 return true 3654 } 3655 return false 3656 } 3657 func rewriteValueARM64_OpARM64MODW_0(v *Value) bool { 3658 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 3659 // cond: 3660 // result: (MOVDconst [int64(int32(c)%int32(d))]) 3661 for { 3662 _ = v.Args[1] 3663 v_0 := v.Args[0] 3664 if v_0.Op != OpARM64MOVDconst { 3665 break 3666 } 3667 c := v_0.AuxInt 3668 v_1 := v.Args[1] 3669 if v_1.Op != OpARM64MOVDconst { 3670 break 3671 } 3672 d := v_1.AuxInt 3673 v.reset(OpARM64MOVDconst) 3674 v.AuxInt 
= int64(int32(c) % int32(d)) 3675 return true 3676 } 3677 return false 3678 } 3679 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool { 3680 b := v.Block 3681 _ = b 3682 config := b.Func.Config 3683 _ = config 3684 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 3685 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3686 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3687 for { 3688 off1 := v.AuxInt 3689 sym := v.Aux 3690 _ = v.Args[1] 3691 v_0 := v.Args[0] 3692 if v_0.Op != OpARM64ADDconst { 3693 break 3694 } 3695 off2 := v_0.AuxInt 3696 ptr := v_0.Args[0] 3697 mem := v.Args[1] 3698 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3699 break 3700 } 3701 v.reset(OpARM64MOVBUload) 3702 v.AuxInt = off1 + off2 3703 v.Aux = sym 3704 v.AddArg(ptr) 3705 v.AddArg(mem) 3706 return true 3707 } 3708 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3709 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3710 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3711 for { 3712 off1 := v.AuxInt 3713 sym1 := v.Aux 3714 _ = v.Args[1] 3715 v_0 := v.Args[0] 3716 if v_0.Op != OpARM64MOVDaddr { 3717 break 3718 } 3719 off2 := v_0.AuxInt 3720 sym2 := v_0.Aux 3721 ptr := v_0.Args[0] 3722 mem := v.Args[1] 3723 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3724 break 3725 } 3726 v.reset(OpARM64MOVBUload) 3727 v.AuxInt = off1 + off2 3728 v.Aux = mergeSym(sym1, sym2) 3729 v.AddArg(ptr) 3730 v.AddArg(mem) 3731 return true 3732 } 3733 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3734 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3735 // result: (MOVDconst [0]) 3736 for { 3737 off := v.AuxInt 3738 sym := v.Aux 3739 _ = v.Args[1] 3740 ptr := v.Args[0] 3741 v_1 := v.Args[1] 3742 if v_1.Op != OpARM64MOVBstorezero { 3743 break 3744 } 3745 off2 := 
v_1.AuxInt 3746 sym2 := v_1.Aux 3747 _ = v_1.Args[1] 3748 ptr2 := v_1.Args[0] 3749 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3750 break 3751 } 3752 v.reset(OpARM64MOVDconst) 3753 v.AuxInt = 0 3754 return true 3755 } 3756 return false 3757 } 3758 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool { 3759 // match: (MOVBUreg x:(MOVBUload _ _)) 3760 // cond: 3761 // result: (MOVDreg x) 3762 for { 3763 x := v.Args[0] 3764 if x.Op != OpARM64MOVBUload { 3765 break 3766 } 3767 _ = x.Args[1] 3768 v.reset(OpARM64MOVDreg) 3769 v.AddArg(x) 3770 return true 3771 } 3772 // match: (MOVBUreg x:(MOVBUreg _)) 3773 // cond: 3774 // result: (MOVDreg x) 3775 for { 3776 x := v.Args[0] 3777 if x.Op != OpARM64MOVBUreg { 3778 break 3779 } 3780 v.reset(OpARM64MOVDreg) 3781 v.AddArg(x) 3782 return true 3783 } 3784 // match: (MOVBUreg (MOVDconst [c])) 3785 // cond: 3786 // result: (MOVDconst [int64(uint8(c))]) 3787 for { 3788 v_0 := v.Args[0] 3789 if v_0.Op != OpARM64MOVDconst { 3790 break 3791 } 3792 c := v_0.AuxInt 3793 v.reset(OpARM64MOVDconst) 3794 v.AuxInt = int64(uint8(c)) 3795 return true 3796 } 3797 // match: (MOVBUreg x) 3798 // cond: x.Type.IsBoolean() 3799 // result: (MOVDreg x) 3800 for { 3801 x := v.Args[0] 3802 if !(x.Type.IsBoolean()) { 3803 break 3804 } 3805 v.reset(OpARM64MOVDreg) 3806 v.AddArg(x) 3807 return true 3808 } 3809 return false 3810 } 3811 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool { 3812 b := v.Block 3813 _ = b 3814 config := b.Func.Config 3815 _ = config 3816 // match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem) 3817 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3818 // result: (MOVBload [off1+off2] {sym} ptr mem) 3819 for { 3820 off1 := v.AuxInt 3821 sym := v.Aux 3822 _ = v.Args[1] 3823 v_0 := v.Args[0] 3824 if v_0.Op != OpARM64ADDconst { 3825 break 3826 } 3827 off2 := v_0.AuxInt 3828 ptr := v_0.Args[0] 3829 mem := v.Args[1] 3830 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || 
!config.ctxt.Flag_shared)) { 3831 break 3832 } 3833 v.reset(OpARM64MOVBload) 3834 v.AuxInt = off1 + off2 3835 v.Aux = sym 3836 v.AddArg(ptr) 3837 v.AddArg(mem) 3838 return true 3839 } 3840 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 3841 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3842 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3843 for { 3844 off1 := v.AuxInt 3845 sym1 := v.Aux 3846 _ = v.Args[1] 3847 v_0 := v.Args[0] 3848 if v_0.Op != OpARM64MOVDaddr { 3849 break 3850 } 3851 off2 := v_0.AuxInt 3852 sym2 := v_0.Aux 3853 ptr := v_0.Args[0] 3854 mem := v.Args[1] 3855 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3856 break 3857 } 3858 v.reset(OpARM64MOVBload) 3859 v.AuxInt = off1 + off2 3860 v.Aux = mergeSym(sym1, sym2) 3861 v.AddArg(ptr) 3862 v.AddArg(mem) 3863 return true 3864 } 3865 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 3866 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3867 // result: (MOVDconst [0]) 3868 for { 3869 off := v.AuxInt 3870 sym := v.Aux 3871 _ = v.Args[1] 3872 ptr := v.Args[0] 3873 v_1 := v.Args[1] 3874 if v_1.Op != OpARM64MOVBstorezero { 3875 break 3876 } 3877 off2 := v_1.AuxInt 3878 sym2 := v_1.Aux 3879 _ = v_1.Args[1] 3880 ptr2 := v_1.Args[0] 3881 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3882 break 3883 } 3884 v.reset(OpARM64MOVDconst) 3885 v.AuxInt = 0 3886 return true 3887 } 3888 return false 3889 } 3890 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool { 3891 // match: (MOVBreg x:(MOVBload _ _)) 3892 // cond: 3893 // result: (MOVDreg x) 3894 for { 3895 x := v.Args[0] 3896 if x.Op != OpARM64MOVBload { 3897 break 3898 } 3899 _ = x.Args[1] 3900 v.reset(OpARM64MOVDreg) 3901 v.AddArg(x) 3902 return true 3903 } 3904 // match: (MOVBreg x:(MOVBreg _)) 3905 // cond: 3906 // result: (MOVDreg x) 3907 for { 3908 x := v.Args[0] 
3909 if x.Op != OpARM64MOVBreg { 3910 break 3911 } 3912 v.reset(OpARM64MOVDreg) 3913 v.AddArg(x) 3914 return true 3915 } 3916 // match: (MOVBreg (MOVDconst [c])) 3917 // cond: 3918 // result: (MOVDconst [int64(int8(c))]) 3919 for { 3920 v_0 := v.Args[0] 3921 if v_0.Op != OpARM64MOVDconst { 3922 break 3923 } 3924 c := v_0.AuxInt 3925 v.reset(OpARM64MOVDconst) 3926 v.AuxInt = int64(int8(c)) 3927 return true 3928 } 3929 return false 3930 } 3931 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool { 3932 b := v.Block 3933 _ = b 3934 config := b.Func.Config 3935 _ = config 3936 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 3937 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3938 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3939 for { 3940 off1 := v.AuxInt 3941 sym := v.Aux 3942 _ = v.Args[2] 3943 v_0 := v.Args[0] 3944 if v_0.Op != OpARM64ADDconst { 3945 break 3946 } 3947 off2 := v_0.AuxInt 3948 ptr := v_0.Args[0] 3949 val := v.Args[1] 3950 mem := v.Args[2] 3951 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3952 break 3953 } 3954 v.reset(OpARM64MOVBstore) 3955 v.AuxInt = off1 + off2 3956 v.Aux = sym 3957 v.AddArg(ptr) 3958 v.AddArg(val) 3959 v.AddArg(mem) 3960 return true 3961 } 3962 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 3963 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 3964 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3965 for { 3966 off1 := v.AuxInt 3967 sym1 := v.Aux 3968 _ = v.Args[2] 3969 v_0 := v.Args[0] 3970 if v_0.Op != OpARM64MOVDaddr { 3971 break 3972 } 3973 off2 := v_0.AuxInt 3974 sym2 := v_0.Aux 3975 ptr := v_0.Args[0] 3976 val := v.Args[1] 3977 mem := v.Args[2] 3978 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 3979 break 3980 } 3981 v.reset(OpARM64MOVBstore) 3982 v.AuxInt = off1 + off2 3983 v.Aux = 
mergeSym(sym1, sym2) 3984 v.AddArg(ptr) 3985 v.AddArg(val) 3986 v.AddArg(mem) 3987 return true 3988 } 3989 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 3990 // cond: 3991 // result: (MOVBstorezero [off] {sym} ptr mem) 3992 for { 3993 off := v.AuxInt 3994 sym := v.Aux 3995 _ = v.Args[2] 3996 ptr := v.Args[0] 3997 v_1 := v.Args[1] 3998 if v_1.Op != OpARM64MOVDconst { 3999 break 4000 } 4001 if v_1.AuxInt != 0 { 4002 break 4003 } 4004 mem := v.Args[2] 4005 v.reset(OpARM64MOVBstorezero) 4006 v.AuxInt = off 4007 v.Aux = sym 4008 v.AddArg(ptr) 4009 v.AddArg(mem) 4010 return true 4011 } 4012 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 4013 // cond: 4014 // result: (MOVBstore [off] {sym} ptr x mem) 4015 for { 4016 off := v.AuxInt 4017 sym := v.Aux 4018 _ = v.Args[2] 4019 ptr := v.Args[0] 4020 v_1 := v.Args[1] 4021 if v_1.Op != OpARM64MOVBreg { 4022 break 4023 } 4024 x := v_1.Args[0] 4025 mem := v.Args[2] 4026 v.reset(OpARM64MOVBstore) 4027 v.AuxInt = off 4028 v.Aux = sym 4029 v.AddArg(ptr) 4030 v.AddArg(x) 4031 v.AddArg(mem) 4032 return true 4033 } 4034 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 4035 // cond: 4036 // result: (MOVBstore [off] {sym} ptr x mem) 4037 for { 4038 off := v.AuxInt 4039 sym := v.Aux 4040 _ = v.Args[2] 4041 ptr := v.Args[0] 4042 v_1 := v.Args[1] 4043 if v_1.Op != OpARM64MOVBUreg { 4044 break 4045 } 4046 x := v_1.Args[0] 4047 mem := v.Args[2] 4048 v.reset(OpARM64MOVBstore) 4049 v.AuxInt = off 4050 v.Aux = sym 4051 v.AddArg(ptr) 4052 v.AddArg(x) 4053 v.AddArg(mem) 4054 return true 4055 } 4056 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 4057 // cond: 4058 // result: (MOVBstore [off] {sym} ptr x mem) 4059 for { 4060 off := v.AuxInt 4061 sym := v.Aux 4062 _ = v.Args[2] 4063 ptr := v.Args[0] 4064 v_1 := v.Args[1] 4065 if v_1.Op != OpARM64MOVHreg { 4066 break 4067 } 4068 x := v_1.Args[0] 4069 mem := v.Args[2] 4070 v.reset(OpARM64MOVBstore) 4071 v.AuxInt = off 4072 v.Aux = sym 4073 v.AddArg(ptr) 4074 v.AddArg(x) 
// rewriteValueARM64_OpARM64MOVBstorezero_0 applies the generated rewrite rules
// for MOVBstorezero: it folds a constant offset (ADDconst) or a static address
// (MOVDaddr) from the pointer operand into the store's AuxInt/Aux. Reports
// whether a rewrite fired. Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDload_0 applies the generated rewrite rules for
// MOVDload: offset/address folding, plus replacing a load that reads memory
// just zeroed by a matching MOVDstorezero with (MOVDconst [0]).
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDreg_0 applies the generated rewrite rules for
// MOVDreg: a single-use register move becomes a MOVDnop, and a move of a
// constant is the constant itself. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstore_0 applies the generated rewrite rules for
// MOVDstore: offset/address folding, and turning a store of constant zero into
// the dedicated MOVDstorezero op. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezero_0 applies the generated rewrite rules
// for MOVDstorezero: offset (ADDconst) and static-address (MOVDaddr) folding.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUload_0 applies the generated rewrite rules for
// MOVHUload: offset/address folding, and load-after-MOVHstorezero elimination
// to (MOVDconst [0]). Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUreg_0 applies the generated rewrite rules for
// MOVHUreg (zero-extend halfword): the extension is dropped when the operand
// is already zero-extended to at most 16 bits, and a constant operand is
// folded to its low 16 bits. Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHload_0 applies the generated rewrite rules for
// MOVHload: offset/address folding, and load-after-MOVHstorezero elimination
// to (MOVDconst [0]). Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHreg_0 applies the generated rewrite rules for
// MOVHreg (sign-extend halfword): the extension is dropped when the operand
// already fits in int16, and a constant operand is folded to its sign-extended
// low 16 bits. Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstore_0 applies the generated rewrite rules for
// MOVHstore: offset/address folding, store-of-zero to MOVHstorezero, and
// dropping redundant sign/zero extensions of the stored value (only the low
// 16 bits are written anyway). Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstorezero_0 applies the generated rewrite rules
// for MOVHstorezero: offset (ADDconst) and static-address (MOVDaddr) folding.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVQstorezero_0 applies the generated rewrite rules
// for MOVQstorezero: offset (ADDconst) and static-address (MOVDaddr) folding.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVQstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUload_0 applies the generated rewrite rules for
// MOVWUload: offset/address folding, and load-after-MOVWstorezero elimination
// to (MOVDconst [0]). Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWUreg_0 applies the generated rewrite rules for
// MOVWUreg (zero-extend word): the extension is dropped when the operand is
// already zero-extended to at most 32 bits, and a constant operand is folded
// to its low 32 bits. Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWload_0 applies the generated rewrite rules for
// MOVWload: offset/address folding, and load-after-MOVWstorezero elimination
// to (MOVDconst [0]). Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
!(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5261 break 5262 } 5263 v.reset(OpARM64MOVWload) 5264 v.AuxInt = off1 + off2 5265 v.Aux = mergeSym(sym1, sym2) 5266 v.AddArg(ptr) 5267 v.AddArg(mem) 5268 return true 5269 } 5270 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 5271 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 5272 // result: (MOVDconst [0]) 5273 for { 5274 off := v.AuxInt 5275 sym := v.Aux 5276 _ = v.Args[1] 5277 ptr := v.Args[0] 5278 v_1 := v.Args[1] 5279 if v_1.Op != OpARM64MOVWstorezero { 5280 break 5281 } 5282 off2 := v_1.AuxInt 5283 sym2 := v_1.Aux 5284 _ = v_1.Args[1] 5285 ptr2 := v_1.Args[0] 5286 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 5287 break 5288 } 5289 v.reset(OpARM64MOVDconst) 5290 v.AuxInt = 0 5291 return true 5292 } 5293 return false 5294 } 5295 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool { 5296 // match: (MOVWreg x:(MOVBload _ _)) 5297 // cond: 5298 // result: (MOVDreg x) 5299 for { 5300 x := v.Args[0] 5301 if x.Op != OpARM64MOVBload { 5302 break 5303 } 5304 _ = x.Args[1] 5305 v.reset(OpARM64MOVDreg) 5306 v.AddArg(x) 5307 return true 5308 } 5309 // match: (MOVWreg x:(MOVBUload _ _)) 5310 // cond: 5311 // result: (MOVDreg x) 5312 for { 5313 x := v.Args[0] 5314 if x.Op != OpARM64MOVBUload { 5315 break 5316 } 5317 _ = x.Args[1] 5318 v.reset(OpARM64MOVDreg) 5319 v.AddArg(x) 5320 return true 5321 } 5322 // match: (MOVWreg x:(MOVHload _ _)) 5323 // cond: 5324 // result: (MOVDreg x) 5325 for { 5326 x := v.Args[0] 5327 if x.Op != OpARM64MOVHload { 5328 break 5329 } 5330 _ = x.Args[1] 5331 v.reset(OpARM64MOVDreg) 5332 v.AddArg(x) 5333 return true 5334 } 5335 // match: (MOVWreg x:(MOVHUload _ _)) 5336 // cond: 5337 // result: (MOVDreg x) 5338 for { 5339 x := v.Args[0] 5340 if x.Op != OpARM64MOVHUload { 5341 break 5342 } 5343 _ = x.Args[1] 5344 v.reset(OpARM64MOVDreg) 5345 v.AddArg(x) 5346 return true 5347 } 5348 // match: 
(MOVWreg x:(MOVWload _ _)) 5349 // cond: 5350 // result: (MOVDreg x) 5351 for { 5352 x := v.Args[0] 5353 if x.Op != OpARM64MOVWload { 5354 break 5355 } 5356 _ = x.Args[1] 5357 v.reset(OpARM64MOVDreg) 5358 v.AddArg(x) 5359 return true 5360 } 5361 // match: (MOVWreg x:(MOVBreg _)) 5362 // cond: 5363 // result: (MOVDreg x) 5364 for { 5365 x := v.Args[0] 5366 if x.Op != OpARM64MOVBreg { 5367 break 5368 } 5369 v.reset(OpARM64MOVDreg) 5370 v.AddArg(x) 5371 return true 5372 } 5373 // match: (MOVWreg x:(MOVBUreg _)) 5374 // cond: 5375 // result: (MOVDreg x) 5376 for { 5377 x := v.Args[0] 5378 if x.Op != OpARM64MOVBUreg { 5379 break 5380 } 5381 v.reset(OpARM64MOVDreg) 5382 v.AddArg(x) 5383 return true 5384 } 5385 // match: (MOVWreg x:(MOVHreg _)) 5386 // cond: 5387 // result: (MOVDreg x) 5388 for { 5389 x := v.Args[0] 5390 if x.Op != OpARM64MOVHreg { 5391 break 5392 } 5393 v.reset(OpARM64MOVDreg) 5394 v.AddArg(x) 5395 return true 5396 } 5397 // match: (MOVWreg x:(MOVHreg _)) 5398 // cond: 5399 // result: (MOVDreg x) 5400 for { 5401 x := v.Args[0] 5402 if x.Op != OpARM64MOVHreg { 5403 break 5404 } 5405 v.reset(OpARM64MOVDreg) 5406 v.AddArg(x) 5407 return true 5408 } 5409 // match: (MOVWreg x:(MOVWreg _)) 5410 // cond: 5411 // result: (MOVDreg x) 5412 for { 5413 x := v.Args[0] 5414 if x.Op != OpARM64MOVWreg { 5415 break 5416 } 5417 v.reset(OpARM64MOVDreg) 5418 v.AddArg(x) 5419 return true 5420 } 5421 return false 5422 } 5423 func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool { 5424 // match: (MOVWreg (MOVDconst [c])) 5425 // cond: 5426 // result: (MOVDconst [int64(int32(c))]) 5427 for { 5428 v_0 := v.Args[0] 5429 if v_0.Op != OpARM64MOVDconst { 5430 break 5431 } 5432 c := v_0.AuxInt 5433 v.reset(OpARM64MOVDconst) 5434 v.AuxInt = int64(int32(c)) 5435 return true 5436 } 5437 return false 5438 } 5439 func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool { 5440 b := v.Block 5441 _ = b 5442 config := b.Func.Config 5443 _ = config 5444 // match: (MOVWstore [off1] {sym} 
// rewriteValueARM64_OpARM64MOVWstorezero_0 applies the generated rewrite rules
// for MOVWstorezero: offset (ADDconst) and static-address (MOVDaddr) folding.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
[off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5598 for { 5599 off1 := v.AuxInt 5600 sym1 := v.Aux 5601 _ = v.Args[1] 5602 v_0 := v.Args[0] 5603 if v_0.Op != OpARM64MOVDaddr { 5604 break 5605 } 5606 off2 := v_0.AuxInt 5607 sym2 := v_0.Aux 5608 ptr := v_0.Args[0] 5609 mem := v.Args[1] 5610 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5611 break 5612 } 5613 v.reset(OpARM64MOVWstorezero) 5614 v.AuxInt = off1 + off2 5615 v.Aux = mergeSym(sym1, sym2) 5616 v.AddArg(ptr) 5617 v.AddArg(mem) 5618 return true 5619 } 5620 return false 5621 } 5622 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool { 5623 // match: (MUL x (MOVDconst [-1])) 5624 // cond: 5625 // result: (NEG x) 5626 for { 5627 _ = v.Args[1] 5628 x := v.Args[0] 5629 v_1 := v.Args[1] 5630 if v_1.Op != OpARM64MOVDconst { 5631 break 5632 } 5633 if v_1.AuxInt != -1 { 5634 break 5635 } 5636 v.reset(OpARM64NEG) 5637 v.AddArg(x) 5638 return true 5639 } 5640 // match: (MUL (MOVDconst [-1]) x) 5641 // cond: 5642 // result: (NEG x) 5643 for { 5644 _ = v.Args[1] 5645 v_0 := v.Args[0] 5646 if v_0.Op != OpARM64MOVDconst { 5647 break 5648 } 5649 if v_0.AuxInt != -1 { 5650 break 5651 } 5652 x := v.Args[1] 5653 v.reset(OpARM64NEG) 5654 v.AddArg(x) 5655 return true 5656 } 5657 // match: (MUL _ (MOVDconst [0])) 5658 // cond: 5659 // result: (MOVDconst [0]) 5660 for { 5661 _ = v.Args[1] 5662 v_1 := v.Args[1] 5663 if v_1.Op != OpARM64MOVDconst { 5664 break 5665 } 5666 if v_1.AuxInt != 0 { 5667 break 5668 } 5669 v.reset(OpARM64MOVDconst) 5670 v.AuxInt = 0 5671 return true 5672 } 5673 // match: (MUL (MOVDconst [0]) _) 5674 // cond: 5675 // result: (MOVDconst [0]) 5676 for { 5677 _ = v.Args[1] 5678 v_0 := v.Args[0] 5679 if v_0.Op != OpARM64MOVDconst { 5680 break 5681 } 5682 if v_0.AuxInt != 0 { 5683 break 5684 } 5685 v.reset(OpARM64MOVDconst) 5686 v.AuxInt = 0 5687 return true 5688 } 5689 // match: (MUL x (MOVDconst [1])) 5690 // cond: 5691 // result: x 5692 for { 5693 _ = v.Args[1] 
5694 x := v.Args[0] 5695 v_1 := v.Args[1] 5696 if v_1.Op != OpARM64MOVDconst { 5697 break 5698 } 5699 if v_1.AuxInt != 1 { 5700 break 5701 } 5702 v.reset(OpCopy) 5703 v.Type = x.Type 5704 v.AddArg(x) 5705 return true 5706 } 5707 // match: (MUL (MOVDconst [1]) x) 5708 // cond: 5709 // result: x 5710 for { 5711 _ = v.Args[1] 5712 v_0 := v.Args[0] 5713 if v_0.Op != OpARM64MOVDconst { 5714 break 5715 } 5716 if v_0.AuxInt != 1 { 5717 break 5718 } 5719 x := v.Args[1] 5720 v.reset(OpCopy) 5721 v.Type = x.Type 5722 v.AddArg(x) 5723 return true 5724 } 5725 // match: (MUL x (MOVDconst [c])) 5726 // cond: isPowerOfTwo(c) 5727 // result: (SLLconst [log2(c)] x) 5728 for { 5729 _ = v.Args[1] 5730 x := v.Args[0] 5731 v_1 := v.Args[1] 5732 if v_1.Op != OpARM64MOVDconst { 5733 break 5734 } 5735 c := v_1.AuxInt 5736 if !(isPowerOfTwo(c)) { 5737 break 5738 } 5739 v.reset(OpARM64SLLconst) 5740 v.AuxInt = log2(c) 5741 v.AddArg(x) 5742 return true 5743 } 5744 // match: (MUL (MOVDconst [c]) x) 5745 // cond: isPowerOfTwo(c) 5746 // result: (SLLconst [log2(c)] x) 5747 for { 5748 _ = v.Args[1] 5749 v_0 := v.Args[0] 5750 if v_0.Op != OpARM64MOVDconst { 5751 break 5752 } 5753 c := v_0.AuxInt 5754 x := v.Args[1] 5755 if !(isPowerOfTwo(c)) { 5756 break 5757 } 5758 v.reset(OpARM64SLLconst) 5759 v.AuxInt = log2(c) 5760 v.AddArg(x) 5761 return true 5762 } 5763 // match: (MUL x (MOVDconst [c])) 5764 // cond: isPowerOfTwo(c-1) && c >= 3 5765 // result: (ADDshiftLL x x [log2(c-1)]) 5766 for { 5767 _ = v.Args[1] 5768 x := v.Args[0] 5769 v_1 := v.Args[1] 5770 if v_1.Op != OpARM64MOVDconst { 5771 break 5772 } 5773 c := v_1.AuxInt 5774 if !(isPowerOfTwo(c-1) && c >= 3) { 5775 break 5776 } 5777 v.reset(OpARM64ADDshiftLL) 5778 v.AuxInt = log2(c - 1) 5779 v.AddArg(x) 5780 v.AddArg(x) 5781 return true 5782 } 5783 // match: (MUL (MOVDconst [c]) x) 5784 // cond: isPowerOfTwo(c-1) && c >= 3 5785 // result: (ADDshiftLL x x [log2(c-1)]) 5786 for { 5787 _ = v.Args[1] 5788 v_0 := v.Args[0] 5789 if v_0.Op != 
OpARM64MOVDconst { 5790 break 5791 } 5792 c := v_0.AuxInt 5793 x := v.Args[1] 5794 if !(isPowerOfTwo(c-1) && c >= 3) { 5795 break 5796 } 5797 v.reset(OpARM64ADDshiftLL) 5798 v.AuxInt = log2(c - 1) 5799 v.AddArg(x) 5800 v.AddArg(x) 5801 return true 5802 } 5803 return false 5804 } 5805 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 5806 b := v.Block 5807 _ = b 5808 // match: (MUL x (MOVDconst [c])) 5809 // cond: isPowerOfTwo(c+1) && c >= 7 5810 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5811 for { 5812 _ = v.Args[1] 5813 x := v.Args[0] 5814 v_1 := v.Args[1] 5815 if v_1.Op != OpARM64MOVDconst { 5816 break 5817 } 5818 c := v_1.AuxInt 5819 if !(isPowerOfTwo(c+1) && c >= 7) { 5820 break 5821 } 5822 v.reset(OpARM64ADDshiftLL) 5823 v.AuxInt = log2(c + 1) 5824 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5825 v0.AddArg(x) 5826 v.AddArg(v0) 5827 v.AddArg(x) 5828 return true 5829 } 5830 // match: (MUL (MOVDconst [c]) x) 5831 // cond: isPowerOfTwo(c+1) && c >= 7 5832 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 5833 for { 5834 _ = v.Args[1] 5835 v_0 := v.Args[0] 5836 if v_0.Op != OpARM64MOVDconst { 5837 break 5838 } 5839 c := v_0.AuxInt 5840 x := v.Args[1] 5841 if !(isPowerOfTwo(c+1) && c >= 7) { 5842 break 5843 } 5844 v.reset(OpARM64ADDshiftLL) 5845 v.AuxInt = log2(c + 1) 5846 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5847 v0.AddArg(x) 5848 v.AddArg(v0) 5849 v.AddArg(x) 5850 return true 5851 } 5852 // match: (MUL x (MOVDconst [c])) 5853 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5854 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5855 for { 5856 _ = v.Args[1] 5857 x := v.Args[0] 5858 v_1 := v.Args[1] 5859 if v_1.Op != OpARM64MOVDconst { 5860 break 5861 } 5862 c := v_1.AuxInt 5863 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5864 break 5865 } 5866 v.reset(OpARM64SLLconst) 5867 v.AuxInt = log2(c / 3) 5868 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5869 v0.AuxInt = 1 5870 v0.AddArg(x) 5871 v0.AddArg(x) 5872 v.AddArg(v0) 5873 return 
true 5874 } 5875 // match: (MUL (MOVDconst [c]) x) 5876 // cond: c%3 == 0 && isPowerOfTwo(c/3) 5877 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 5878 for { 5879 _ = v.Args[1] 5880 v_0 := v.Args[0] 5881 if v_0.Op != OpARM64MOVDconst { 5882 break 5883 } 5884 c := v_0.AuxInt 5885 x := v.Args[1] 5886 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 5887 break 5888 } 5889 v.reset(OpARM64SLLconst) 5890 v.AuxInt = log2(c / 3) 5891 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5892 v0.AuxInt = 1 5893 v0.AddArg(x) 5894 v0.AddArg(x) 5895 v.AddArg(v0) 5896 return true 5897 } 5898 // match: (MUL x (MOVDconst [c])) 5899 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5900 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5901 for { 5902 _ = v.Args[1] 5903 x := v.Args[0] 5904 v_1 := v.Args[1] 5905 if v_1.Op != OpARM64MOVDconst { 5906 break 5907 } 5908 c := v_1.AuxInt 5909 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5910 break 5911 } 5912 v.reset(OpARM64SLLconst) 5913 v.AuxInt = log2(c / 5) 5914 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5915 v0.AuxInt = 2 5916 v0.AddArg(x) 5917 v0.AddArg(x) 5918 v.AddArg(v0) 5919 return true 5920 } 5921 // match: (MUL (MOVDconst [c]) x) 5922 // cond: c%5 == 0 && isPowerOfTwo(c/5) 5923 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 5924 for { 5925 _ = v.Args[1] 5926 v_0 := v.Args[0] 5927 if v_0.Op != OpARM64MOVDconst { 5928 break 5929 } 5930 c := v_0.AuxInt 5931 x := v.Args[1] 5932 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 5933 break 5934 } 5935 v.reset(OpARM64SLLconst) 5936 v.AuxInt = log2(c / 5) 5937 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5938 v0.AuxInt = 2 5939 v0.AddArg(x) 5940 v0.AddArg(x) 5941 v.AddArg(v0) 5942 return true 5943 } 5944 // match: (MUL x (MOVDconst [c])) 5945 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5946 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5947 for { 5948 _ = v.Args[1] 5949 x := v.Args[0] 5950 v_1 := v.Args[1] 5951 if v_1.Op != 
OpARM64MOVDconst { 5952 break 5953 } 5954 c := v_1.AuxInt 5955 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5956 break 5957 } 5958 v.reset(OpARM64SLLconst) 5959 v.AuxInt = log2(c / 7) 5960 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5961 v0.AuxInt = 3 5962 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5963 v1.AddArg(x) 5964 v0.AddArg(v1) 5965 v0.AddArg(x) 5966 v.AddArg(v0) 5967 return true 5968 } 5969 // match: (MUL (MOVDconst [c]) x) 5970 // cond: c%7 == 0 && isPowerOfTwo(c/7) 5971 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 5972 for { 5973 _ = v.Args[1] 5974 v_0 := v.Args[0] 5975 if v_0.Op != OpARM64MOVDconst { 5976 break 5977 } 5978 c := v_0.AuxInt 5979 x := v.Args[1] 5980 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 5981 break 5982 } 5983 v.reset(OpARM64SLLconst) 5984 v.AuxInt = log2(c / 7) 5985 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 5986 v0.AuxInt = 3 5987 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 5988 v1.AddArg(x) 5989 v0.AddArg(v1) 5990 v0.AddArg(x) 5991 v.AddArg(v0) 5992 return true 5993 } 5994 // match: (MUL x (MOVDconst [c])) 5995 // cond: c%9 == 0 && isPowerOfTwo(c/9) 5996 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 5997 for { 5998 _ = v.Args[1] 5999 x := v.Args[0] 6000 v_1 := v.Args[1] 6001 if v_1.Op != OpARM64MOVDconst { 6002 break 6003 } 6004 c := v_1.AuxInt 6005 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 6006 break 6007 } 6008 v.reset(OpARM64SLLconst) 6009 v.AuxInt = log2(c / 9) 6010 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6011 v0.AuxInt = 3 6012 v0.AddArg(x) 6013 v0.AddArg(x) 6014 v.AddArg(v0) 6015 return true 6016 } 6017 // match: (MUL (MOVDconst [c]) x) 6018 // cond: c%9 == 0 && isPowerOfTwo(c/9) 6019 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6020 for { 6021 _ = v.Args[1] 6022 v_0 := v.Args[0] 6023 if v_0.Op != OpARM64MOVDconst { 6024 break 6025 } 6026 c := v_0.AuxInt 6027 x := v.Args[1] 6028 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 6029 break 6030 } 6031 
v.reset(OpARM64SLLconst) 6032 v.AuxInt = log2(c / 9) 6033 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6034 v0.AuxInt = 3 6035 v0.AddArg(x) 6036 v0.AddArg(x) 6037 v.AddArg(v0) 6038 return true 6039 } 6040 return false 6041 } 6042 func rewriteValueARM64_OpARM64MUL_20(v *Value) bool { 6043 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 6044 // cond: 6045 // result: (MOVDconst [c*d]) 6046 for { 6047 _ = v.Args[1] 6048 v_0 := v.Args[0] 6049 if v_0.Op != OpARM64MOVDconst { 6050 break 6051 } 6052 c := v_0.AuxInt 6053 v_1 := v.Args[1] 6054 if v_1.Op != OpARM64MOVDconst { 6055 break 6056 } 6057 d := v_1.AuxInt 6058 v.reset(OpARM64MOVDconst) 6059 v.AuxInt = c * d 6060 return true 6061 } 6062 // match: (MUL (MOVDconst [d]) (MOVDconst [c])) 6063 // cond: 6064 // result: (MOVDconst [c*d]) 6065 for { 6066 _ = v.Args[1] 6067 v_0 := v.Args[0] 6068 if v_0.Op != OpARM64MOVDconst { 6069 break 6070 } 6071 d := v_0.AuxInt 6072 v_1 := v.Args[1] 6073 if v_1.Op != OpARM64MOVDconst { 6074 break 6075 } 6076 c := v_1.AuxInt 6077 v.reset(OpARM64MOVDconst) 6078 v.AuxInt = c * d 6079 return true 6080 } 6081 return false 6082 } 6083 func rewriteValueARM64_OpARM64MULW_0(v *Value) bool { 6084 // match: (MULW x (MOVDconst [c])) 6085 // cond: int32(c)==-1 6086 // result: (NEG x) 6087 for { 6088 _ = v.Args[1] 6089 x := v.Args[0] 6090 v_1 := v.Args[1] 6091 if v_1.Op != OpARM64MOVDconst { 6092 break 6093 } 6094 c := v_1.AuxInt 6095 if !(int32(c) == -1) { 6096 break 6097 } 6098 v.reset(OpARM64NEG) 6099 v.AddArg(x) 6100 return true 6101 } 6102 // match: (MULW (MOVDconst [c]) x) 6103 // cond: int32(c)==-1 6104 // result: (NEG x) 6105 for { 6106 _ = v.Args[1] 6107 v_0 := v.Args[0] 6108 if v_0.Op != OpARM64MOVDconst { 6109 break 6110 } 6111 c := v_0.AuxInt 6112 x := v.Args[1] 6113 if !(int32(c) == -1) { 6114 break 6115 } 6116 v.reset(OpARM64NEG) 6117 v.AddArg(x) 6118 return true 6119 } 6120 // match: (MULW _ (MOVDconst [c])) 6121 // cond: int32(c)==0 6122 // result: (MOVDconst [0]) 6123 for { 6124 
_ = v.Args[1] 6125 v_1 := v.Args[1] 6126 if v_1.Op != OpARM64MOVDconst { 6127 break 6128 } 6129 c := v_1.AuxInt 6130 if !(int32(c) == 0) { 6131 break 6132 } 6133 v.reset(OpARM64MOVDconst) 6134 v.AuxInt = 0 6135 return true 6136 } 6137 // match: (MULW (MOVDconst [c]) _) 6138 // cond: int32(c)==0 6139 // result: (MOVDconst [0]) 6140 for { 6141 _ = v.Args[1] 6142 v_0 := v.Args[0] 6143 if v_0.Op != OpARM64MOVDconst { 6144 break 6145 } 6146 c := v_0.AuxInt 6147 if !(int32(c) == 0) { 6148 break 6149 } 6150 v.reset(OpARM64MOVDconst) 6151 v.AuxInt = 0 6152 return true 6153 } 6154 // match: (MULW x (MOVDconst [c])) 6155 // cond: int32(c)==1 6156 // result: x 6157 for { 6158 _ = v.Args[1] 6159 x := v.Args[0] 6160 v_1 := v.Args[1] 6161 if v_1.Op != OpARM64MOVDconst { 6162 break 6163 } 6164 c := v_1.AuxInt 6165 if !(int32(c) == 1) { 6166 break 6167 } 6168 v.reset(OpCopy) 6169 v.Type = x.Type 6170 v.AddArg(x) 6171 return true 6172 } 6173 // match: (MULW (MOVDconst [c]) x) 6174 // cond: int32(c)==1 6175 // result: x 6176 for { 6177 _ = v.Args[1] 6178 v_0 := v.Args[0] 6179 if v_0.Op != OpARM64MOVDconst { 6180 break 6181 } 6182 c := v_0.AuxInt 6183 x := v.Args[1] 6184 if !(int32(c) == 1) { 6185 break 6186 } 6187 v.reset(OpCopy) 6188 v.Type = x.Type 6189 v.AddArg(x) 6190 return true 6191 } 6192 // match: (MULW x (MOVDconst [c])) 6193 // cond: isPowerOfTwo(c) 6194 // result: (SLLconst [log2(c)] x) 6195 for { 6196 _ = v.Args[1] 6197 x := v.Args[0] 6198 v_1 := v.Args[1] 6199 if v_1.Op != OpARM64MOVDconst { 6200 break 6201 } 6202 c := v_1.AuxInt 6203 if !(isPowerOfTwo(c)) { 6204 break 6205 } 6206 v.reset(OpARM64SLLconst) 6207 v.AuxInt = log2(c) 6208 v.AddArg(x) 6209 return true 6210 } 6211 // match: (MULW (MOVDconst [c]) x) 6212 // cond: isPowerOfTwo(c) 6213 // result: (SLLconst [log2(c)] x) 6214 for { 6215 _ = v.Args[1] 6216 v_0 := v.Args[0] 6217 if v_0.Op != OpARM64MOVDconst { 6218 break 6219 } 6220 c := v_0.AuxInt 6221 x := v.Args[1] 6222 if !(isPowerOfTwo(c)) { 6223 break 6224 } 
6225 v.reset(OpARM64SLLconst) 6226 v.AuxInt = log2(c) 6227 v.AddArg(x) 6228 return true 6229 } 6230 // match: (MULW x (MOVDconst [c])) 6231 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 6232 // result: (ADDshiftLL x x [log2(c-1)]) 6233 for { 6234 _ = v.Args[1] 6235 x := v.Args[0] 6236 v_1 := v.Args[1] 6237 if v_1.Op != OpARM64MOVDconst { 6238 break 6239 } 6240 c := v_1.AuxInt 6241 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 6242 break 6243 } 6244 v.reset(OpARM64ADDshiftLL) 6245 v.AuxInt = log2(c - 1) 6246 v.AddArg(x) 6247 v.AddArg(x) 6248 return true 6249 } 6250 // match: (MULW (MOVDconst [c]) x) 6251 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 6252 // result: (ADDshiftLL x x [log2(c-1)]) 6253 for { 6254 _ = v.Args[1] 6255 v_0 := v.Args[0] 6256 if v_0.Op != OpARM64MOVDconst { 6257 break 6258 } 6259 c := v_0.AuxInt 6260 x := v.Args[1] 6261 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 6262 break 6263 } 6264 v.reset(OpARM64ADDshiftLL) 6265 v.AuxInt = log2(c - 1) 6266 v.AddArg(x) 6267 v.AddArg(x) 6268 return true 6269 } 6270 return false 6271 } 6272 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool { 6273 b := v.Block 6274 _ = b 6275 // match: (MULW x (MOVDconst [c])) 6276 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 6277 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 6278 for { 6279 _ = v.Args[1] 6280 x := v.Args[0] 6281 v_1 := v.Args[1] 6282 if v_1.Op != OpARM64MOVDconst { 6283 break 6284 } 6285 c := v_1.AuxInt 6286 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 6287 break 6288 } 6289 v.reset(OpARM64ADDshiftLL) 6290 v.AuxInt = log2(c + 1) 6291 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6292 v0.AddArg(x) 6293 v.AddArg(v0) 6294 v.AddArg(x) 6295 return true 6296 } 6297 // match: (MULW (MOVDconst [c]) x) 6298 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 6299 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 6300 for { 6301 _ = v.Args[1] 6302 v_0 := v.Args[0] 6303 if v_0.Op != OpARM64MOVDconst { 6304 break 6305 } 6306 c := v_0.AuxInt 6307 x := v.Args[1] 6308 if 
!(isPowerOfTwo(c+1) && int32(c) >= 7) { 6309 break 6310 } 6311 v.reset(OpARM64ADDshiftLL) 6312 v.AuxInt = log2(c + 1) 6313 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6314 v0.AddArg(x) 6315 v.AddArg(v0) 6316 v.AddArg(x) 6317 return true 6318 } 6319 // match: (MULW x (MOVDconst [c])) 6320 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 6321 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 6322 for { 6323 _ = v.Args[1] 6324 x := v.Args[0] 6325 v_1 := v.Args[1] 6326 if v_1.Op != OpARM64MOVDconst { 6327 break 6328 } 6329 c := v_1.AuxInt 6330 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 6331 break 6332 } 6333 v.reset(OpARM64SLLconst) 6334 v.AuxInt = log2(c / 3) 6335 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6336 v0.AuxInt = 1 6337 v0.AddArg(x) 6338 v0.AddArg(x) 6339 v.AddArg(v0) 6340 return true 6341 } 6342 // match: (MULW (MOVDconst [c]) x) 6343 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 6344 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 6345 for { 6346 _ = v.Args[1] 6347 v_0 := v.Args[0] 6348 if v_0.Op != OpARM64MOVDconst { 6349 break 6350 } 6351 c := v_0.AuxInt 6352 x := v.Args[1] 6353 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 6354 break 6355 } 6356 v.reset(OpARM64SLLconst) 6357 v.AuxInt = log2(c / 3) 6358 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6359 v0.AuxInt = 1 6360 v0.AddArg(x) 6361 v0.AddArg(x) 6362 v.AddArg(v0) 6363 return true 6364 } 6365 // match: (MULW x (MOVDconst [c])) 6366 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 6367 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 6368 for { 6369 _ = v.Args[1] 6370 x := v.Args[0] 6371 v_1 := v.Args[1] 6372 if v_1.Op != OpARM64MOVDconst { 6373 break 6374 } 6375 c := v_1.AuxInt 6376 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 6377 break 6378 } 6379 v.reset(OpARM64SLLconst) 6380 v.AuxInt = log2(c / 5) 6381 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6382 v0.AuxInt = 2 6383 v0.AddArg(x) 6384 
v0.AddArg(x) 6385 v.AddArg(v0) 6386 return true 6387 } 6388 // match: (MULW (MOVDconst [c]) x) 6389 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 6390 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 6391 for { 6392 _ = v.Args[1] 6393 v_0 := v.Args[0] 6394 if v_0.Op != OpARM64MOVDconst { 6395 break 6396 } 6397 c := v_0.AuxInt 6398 x := v.Args[1] 6399 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 6400 break 6401 } 6402 v.reset(OpARM64SLLconst) 6403 v.AuxInt = log2(c / 5) 6404 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6405 v0.AuxInt = 2 6406 v0.AddArg(x) 6407 v0.AddArg(x) 6408 v.AddArg(v0) 6409 return true 6410 } 6411 // match: (MULW x (MOVDconst [c])) 6412 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 6413 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 6414 for { 6415 _ = v.Args[1] 6416 x := v.Args[0] 6417 v_1 := v.Args[1] 6418 if v_1.Op != OpARM64MOVDconst { 6419 break 6420 } 6421 c := v_1.AuxInt 6422 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 6423 break 6424 } 6425 v.reset(OpARM64SLLconst) 6426 v.AuxInt = log2(c / 7) 6427 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6428 v0.AuxInt = 3 6429 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6430 v1.AddArg(x) 6431 v0.AddArg(v1) 6432 v0.AddArg(x) 6433 v.AddArg(v0) 6434 return true 6435 } 6436 // match: (MULW (MOVDconst [c]) x) 6437 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 6438 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 6439 for { 6440 _ = v.Args[1] 6441 v_0 := v.Args[0] 6442 if v_0.Op != OpARM64MOVDconst { 6443 break 6444 } 6445 c := v_0.AuxInt 6446 x := v.Args[1] 6447 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 6448 break 6449 } 6450 v.reset(OpARM64SLLconst) 6451 v.AuxInt = log2(c / 7) 6452 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6453 v0.AuxInt = 3 6454 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 6455 v1.AddArg(x) 6456 v0.AddArg(v1) 6457 v0.AddArg(x) 6458 
v.AddArg(v0) 6459 return true 6460 } 6461 // match: (MULW x (MOVDconst [c])) 6462 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 6463 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6464 for { 6465 _ = v.Args[1] 6466 x := v.Args[0] 6467 v_1 := v.Args[1] 6468 if v_1.Op != OpARM64MOVDconst { 6469 break 6470 } 6471 c := v_1.AuxInt 6472 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6473 break 6474 } 6475 v.reset(OpARM64SLLconst) 6476 v.AuxInt = log2(c / 9) 6477 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6478 v0.AuxInt = 3 6479 v0.AddArg(x) 6480 v0.AddArg(x) 6481 v.AddArg(v0) 6482 return true 6483 } 6484 // match: (MULW (MOVDconst [c]) x) 6485 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 6486 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 6487 for { 6488 _ = v.Args[1] 6489 v_0 := v.Args[0] 6490 if v_0.Op != OpARM64MOVDconst { 6491 break 6492 } 6493 c := v_0.AuxInt 6494 x := v.Args[1] 6495 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 6496 break 6497 } 6498 v.reset(OpARM64SLLconst) 6499 v.AuxInt = log2(c / 9) 6500 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6501 v0.AuxInt = 3 6502 v0.AddArg(x) 6503 v0.AddArg(x) 6504 v.AddArg(v0) 6505 return true 6506 } 6507 return false 6508 } 6509 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool { 6510 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 6511 // cond: 6512 // result: (MOVDconst [int64(int32(c)*int32(d))]) 6513 for { 6514 _ = v.Args[1] 6515 v_0 := v.Args[0] 6516 if v_0.Op != OpARM64MOVDconst { 6517 break 6518 } 6519 c := v_0.AuxInt 6520 v_1 := v.Args[1] 6521 if v_1.Op != OpARM64MOVDconst { 6522 break 6523 } 6524 d := v_1.AuxInt 6525 v.reset(OpARM64MOVDconst) 6526 v.AuxInt = int64(int32(c) * int32(d)) 6527 return true 6528 } 6529 // match: (MULW (MOVDconst [d]) (MOVDconst [c])) 6530 // cond: 6531 // result: (MOVDconst [int64(int32(c)*int32(d))]) 6532 for { 6533 _ = v.Args[1] 6534 v_0 := v.Args[0] 6535 if v_0.Op != OpARM64MOVDconst { 6536 break 
6537 } 6538 d := v_0.AuxInt 6539 v_1 := v.Args[1] 6540 if v_1.Op != OpARM64MOVDconst { 6541 break 6542 } 6543 c := v_1.AuxInt 6544 v.reset(OpARM64MOVDconst) 6545 v.AuxInt = int64(int32(c) * int32(d)) 6546 return true 6547 } 6548 return false 6549 } 6550 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool { 6551 // match: (MVN (MOVDconst [c])) 6552 // cond: 6553 // result: (MOVDconst [^c]) 6554 for { 6555 v_0 := v.Args[0] 6556 if v_0.Op != OpARM64MOVDconst { 6557 break 6558 } 6559 c := v_0.AuxInt 6560 v.reset(OpARM64MOVDconst) 6561 v.AuxInt = ^c 6562 return true 6563 } 6564 return false 6565 } 6566 func rewriteValueARM64_OpARM64NEG_0(v *Value) bool { 6567 // match: (NEG (MOVDconst [c])) 6568 // cond: 6569 // result: (MOVDconst [-c]) 6570 for { 6571 v_0 := v.Args[0] 6572 if v_0.Op != OpARM64MOVDconst { 6573 break 6574 } 6575 c := v_0.AuxInt 6576 v.reset(OpARM64MOVDconst) 6577 v.AuxInt = -c 6578 return true 6579 } 6580 return false 6581 } 6582 func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool { 6583 // match: (NotEqual (FlagEQ)) 6584 // cond: 6585 // result: (MOVDconst [0]) 6586 for { 6587 v_0 := v.Args[0] 6588 if v_0.Op != OpARM64FlagEQ { 6589 break 6590 } 6591 v.reset(OpARM64MOVDconst) 6592 v.AuxInt = 0 6593 return true 6594 } 6595 // match: (NotEqual (FlagLT_ULT)) 6596 // cond: 6597 // result: (MOVDconst [1]) 6598 for { 6599 v_0 := v.Args[0] 6600 if v_0.Op != OpARM64FlagLT_ULT { 6601 break 6602 } 6603 v.reset(OpARM64MOVDconst) 6604 v.AuxInt = 1 6605 return true 6606 } 6607 // match: (NotEqual (FlagLT_UGT)) 6608 // cond: 6609 // result: (MOVDconst [1]) 6610 for { 6611 v_0 := v.Args[0] 6612 if v_0.Op != OpARM64FlagLT_UGT { 6613 break 6614 } 6615 v.reset(OpARM64MOVDconst) 6616 v.AuxInt = 1 6617 return true 6618 } 6619 // match: (NotEqual (FlagGT_ULT)) 6620 // cond: 6621 // result: (MOVDconst [1]) 6622 for { 6623 v_0 := v.Args[0] 6624 if v_0.Op != OpARM64FlagGT_ULT { 6625 break 6626 } 6627 v.reset(OpARM64MOVDconst) 6628 v.AuxInt = 1 6629 return true 6630 } 6631 // 
match: (NotEqual (FlagGT_UGT)) 6632 // cond: 6633 // result: (MOVDconst [1]) 6634 for { 6635 v_0 := v.Args[0] 6636 if v_0.Op != OpARM64FlagGT_UGT { 6637 break 6638 } 6639 v.reset(OpARM64MOVDconst) 6640 v.AuxInt = 1 6641 return true 6642 } 6643 // match: (NotEqual (InvertFlags x)) 6644 // cond: 6645 // result: (NotEqual x) 6646 for { 6647 v_0 := v.Args[0] 6648 if v_0.Op != OpARM64InvertFlags { 6649 break 6650 } 6651 x := v_0.Args[0] 6652 v.reset(OpARM64NotEqual) 6653 v.AddArg(x) 6654 return true 6655 } 6656 return false 6657 } 6658 func rewriteValueARM64_OpARM64OR_0(v *Value) bool { 6659 b := v.Block 6660 _ = b 6661 // match: (OR x (MOVDconst [c])) 6662 // cond: 6663 // result: (ORconst [c] x) 6664 for { 6665 _ = v.Args[1] 6666 x := v.Args[0] 6667 v_1 := v.Args[1] 6668 if v_1.Op != OpARM64MOVDconst { 6669 break 6670 } 6671 c := v_1.AuxInt 6672 v.reset(OpARM64ORconst) 6673 v.AuxInt = c 6674 v.AddArg(x) 6675 return true 6676 } 6677 // match: (OR (MOVDconst [c]) x) 6678 // cond: 6679 // result: (ORconst [c] x) 6680 for { 6681 _ = v.Args[1] 6682 v_0 := v.Args[0] 6683 if v_0.Op != OpARM64MOVDconst { 6684 break 6685 } 6686 c := v_0.AuxInt 6687 x := v.Args[1] 6688 v.reset(OpARM64ORconst) 6689 v.AuxInt = c 6690 v.AddArg(x) 6691 return true 6692 } 6693 // match: (OR x x) 6694 // cond: 6695 // result: x 6696 for { 6697 _ = v.Args[1] 6698 x := v.Args[0] 6699 if x != v.Args[1] { 6700 break 6701 } 6702 v.reset(OpCopy) 6703 v.Type = x.Type 6704 v.AddArg(x) 6705 return true 6706 } 6707 // match: (OR x (SLLconst [c] y)) 6708 // cond: 6709 // result: (ORshiftLL x y [c]) 6710 for { 6711 _ = v.Args[1] 6712 x := v.Args[0] 6713 v_1 := v.Args[1] 6714 if v_1.Op != OpARM64SLLconst { 6715 break 6716 } 6717 c := v_1.AuxInt 6718 y := v_1.Args[0] 6719 v.reset(OpARM64ORshiftLL) 6720 v.AuxInt = c 6721 v.AddArg(x) 6722 v.AddArg(y) 6723 return true 6724 } 6725 // match: (OR (SLLconst [c] y) x) 6726 // cond: 6727 // result: (ORshiftLL x y [c]) 6728 for { 6729 _ = v.Args[1] 6730 v_0 := v.Args[0] 
6731 if v_0.Op != OpARM64SLLconst { 6732 break 6733 } 6734 c := v_0.AuxInt 6735 y := v_0.Args[0] 6736 x := v.Args[1] 6737 v.reset(OpARM64ORshiftLL) 6738 v.AuxInt = c 6739 v.AddArg(x) 6740 v.AddArg(y) 6741 return true 6742 } 6743 // match: (OR x (SRLconst [c] y)) 6744 // cond: 6745 // result: (ORshiftRL x y [c]) 6746 for { 6747 _ = v.Args[1] 6748 x := v.Args[0] 6749 v_1 := v.Args[1] 6750 if v_1.Op != OpARM64SRLconst { 6751 break 6752 } 6753 c := v_1.AuxInt 6754 y := v_1.Args[0] 6755 v.reset(OpARM64ORshiftRL) 6756 v.AuxInt = c 6757 v.AddArg(x) 6758 v.AddArg(y) 6759 return true 6760 } 6761 // match: (OR (SRLconst [c] y) x) 6762 // cond: 6763 // result: (ORshiftRL x y [c]) 6764 for { 6765 _ = v.Args[1] 6766 v_0 := v.Args[0] 6767 if v_0.Op != OpARM64SRLconst { 6768 break 6769 } 6770 c := v_0.AuxInt 6771 y := v_0.Args[0] 6772 x := v.Args[1] 6773 v.reset(OpARM64ORshiftRL) 6774 v.AuxInt = c 6775 v.AddArg(x) 6776 v.AddArg(y) 6777 return true 6778 } 6779 // match: (OR x (SRAconst [c] y)) 6780 // cond: 6781 // result: (ORshiftRA x y [c]) 6782 for { 6783 _ = v.Args[1] 6784 x := v.Args[0] 6785 v_1 := v.Args[1] 6786 if v_1.Op != OpARM64SRAconst { 6787 break 6788 } 6789 c := v_1.AuxInt 6790 y := v_1.Args[0] 6791 v.reset(OpARM64ORshiftRA) 6792 v.AuxInt = c 6793 v.AddArg(x) 6794 v.AddArg(y) 6795 return true 6796 } 6797 // match: (OR (SRAconst [c] y) x) 6798 // cond: 6799 // result: (ORshiftRA x y [c]) 6800 for { 6801 _ = v.Args[1] 6802 v_0 := v.Args[0] 6803 if v_0.Op != OpARM64SRAconst { 6804 break 6805 } 6806 c := v_0.AuxInt 6807 y := v_0.Args[0] 6808 x := v.Args[1] 6809 v.reset(OpARM64ORshiftRA) 6810 v.AuxInt = c 6811 v.AddArg(x) 6812 v.AddArg(y) 6813 return true 6814 } 6815 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 6816 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && 
x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6817 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 6818 for { 6819 t := v.Type 6820 _ = v.Args[1] 6821 o0 := v.Args[0] 6822 if o0.Op != OpARM64ORshiftLL { 6823 break 6824 } 6825 if o0.AuxInt != 8 { 6826 break 6827 } 6828 _ = o0.Args[1] 6829 o1 := o0.Args[0] 6830 if o1.Op != OpARM64ORshiftLL { 6831 break 6832 } 6833 if o1.AuxInt != 16 { 6834 break 6835 } 6836 _ = o1.Args[1] 6837 s0 := o1.Args[0] 6838 if s0.Op != OpARM64SLLconst { 6839 break 6840 } 6841 if s0.AuxInt != 24 { 6842 break 6843 } 6844 y0 := s0.Args[0] 6845 if y0.Op != OpARM64MOVDnop { 6846 break 6847 } 6848 x0 := y0.Args[0] 6849 if x0.Op != OpARM64MOVBUload { 6850 break 6851 } 6852 i3 := x0.AuxInt 6853 s := x0.Aux 6854 _ = x0.Args[1] 6855 p := x0.Args[0] 6856 mem := x0.Args[1] 6857 y1 := o1.Args[1] 6858 if y1.Op != OpARM64MOVDnop { 6859 break 6860 } 6861 x1 := y1.Args[0] 6862 if x1.Op != OpARM64MOVBUload { 6863 break 6864 } 6865 i2 := x1.AuxInt 6866 if x1.Aux != s { 6867 break 6868 } 6869 _ = x1.Args[1] 6870 if p != x1.Args[0] { 6871 break 6872 } 6873 if mem != x1.Args[1] { 6874 break 6875 } 6876 y2 := o0.Args[1] 6877 if y2.Op != OpARM64MOVDnop { 6878 break 6879 } 6880 x2 := y2.Args[0] 6881 if x2.Op != OpARM64MOVBUload { 6882 break 6883 } 6884 i1 := x2.AuxInt 6885 if x2.Aux != s { 6886 break 6887 } 6888 _ = x2.Args[1] 6889 if p != x2.Args[0] { 6890 break 6891 } 6892 if mem != x2.Args[1] { 6893 break 6894 } 6895 y3 := v.Args[1] 6896 if y3.Op != OpARM64MOVDnop { 6897 break 6898 } 6899 x3 := y3.Args[0] 6900 if x3.Op != OpARM64MOVBUload { 6901 break 6902 } 6903 i0 := x3.AuxInt 6904 if x3.Aux != s { 
6905 break 6906 } 6907 _ = x3.Args[1] 6908 if p != x3.Args[0] { 6909 break 6910 } 6911 if mem != x3.Args[1] { 6912 break 6913 } 6914 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 6915 break 6916 } 6917 b = mergePoint(b, x0, x1, x2, x3) 6918 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 6919 v.reset(OpCopy) 6920 v.AddArg(v0) 6921 v0.Aux = s 6922 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 6923 v1.AuxInt = i0 6924 v1.AddArg(p) 6925 v0.AddArg(v1) 6926 v0.AddArg(mem) 6927 return true 6928 } 6929 return false 6930 } 6931 func rewriteValueARM64_OpARM64OR_10(v *Value) bool { 6932 b := v.Block 6933 _ = b 6934 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 6935 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 6936 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 6937 for { 6938 t := v.Type 6939 _ = v.Args[1] 6940 y3 := v.Args[0] 6941 if y3.Op != OpARM64MOVDnop { 6942 break 6943 } 6944 x3 := y3.Args[0] 6945 if x3.Op != OpARM64MOVBUload { 6946 break 6947 } 6948 i0 := x3.AuxInt 6949 s := x3.Aux 6950 
_ = x3.Args[1] 6951 p := x3.Args[0] 6952 mem := x3.Args[1] 6953 o0 := v.Args[1] 6954 if o0.Op != OpARM64ORshiftLL { 6955 break 6956 } 6957 if o0.AuxInt != 8 { 6958 break 6959 } 6960 _ = o0.Args[1] 6961 o1 := o0.Args[0] 6962 if o1.Op != OpARM64ORshiftLL { 6963 break 6964 } 6965 if o1.AuxInt != 16 { 6966 break 6967 } 6968 _ = o1.Args[1] 6969 s0 := o1.Args[0] 6970 if s0.Op != OpARM64SLLconst { 6971 break 6972 } 6973 if s0.AuxInt != 24 { 6974 break 6975 } 6976 y0 := s0.Args[0] 6977 if y0.Op != OpARM64MOVDnop { 6978 break 6979 } 6980 x0 := y0.Args[0] 6981 if x0.Op != OpARM64MOVBUload { 6982 break 6983 } 6984 i3 := x0.AuxInt 6985 if x0.Aux != s { 6986 break 6987 } 6988 _ = x0.Args[1] 6989 if p != x0.Args[0] { 6990 break 6991 } 6992 if mem != x0.Args[1] { 6993 break 6994 } 6995 y1 := o1.Args[1] 6996 if y1.Op != OpARM64MOVDnop { 6997 break 6998 } 6999 x1 := y1.Args[0] 7000 if x1.Op != OpARM64MOVBUload { 7001 break 7002 } 7003 i2 := x1.AuxInt 7004 if x1.Aux != s { 7005 break 7006 } 7007 _ = x1.Args[1] 7008 if p != x1.Args[0] { 7009 break 7010 } 7011 if mem != x1.Args[1] { 7012 break 7013 } 7014 y2 := o0.Args[1] 7015 if y2.Op != OpARM64MOVDnop { 7016 break 7017 } 7018 x2 := y2.Args[0] 7019 if x2.Op != OpARM64MOVBUload { 7020 break 7021 } 7022 i1 := x2.AuxInt 7023 if x2.Aux != s { 7024 break 7025 } 7026 _ = x2.Args[1] 7027 if p != x2.Args[0] { 7028 break 7029 } 7030 if mem != x2.Args[1] { 7031 break 7032 } 7033 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7034 break 7035 } 7036 b = mergePoint(b, x0, x1, x2, x3) 7037 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7038 v.reset(OpCopy) 7039 v.AddArg(v0) 
7040 v0.Aux = s 7041 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7042 v1.AuxInt = i0 7043 v1.AddArg(p) 7044 v0.AddArg(v1) 7045 v0.AddArg(mem) 7046 return true 7047 } 7048 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 7049 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7050 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7051 for { 7052 t := v.Type 7053 _ = v.Args[1] 7054 o0 := v.Args[0] 7055 if o0.Op != OpARM64ORshiftLL { 7056 break 7057 } 7058 if o0.AuxInt != 8 { 7059 break 7060 } 7061 _ = o0.Args[1] 7062 o1 := o0.Args[0] 7063 if o1.Op != OpARM64ORshiftLL { 7064 break 7065 } 7066 if o1.AuxInt != 16 { 7067 break 7068 } 7069 _ = o1.Args[1] 7070 o2 := 
o1.Args[0] 7071 if o2.Op != OpARM64ORshiftLL { 7072 break 7073 } 7074 if o2.AuxInt != 24 { 7075 break 7076 } 7077 _ = o2.Args[1] 7078 o3 := o2.Args[0] 7079 if o3.Op != OpARM64ORshiftLL { 7080 break 7081 } 7082 if o3.AuxInt != 32 { 7083 break 7084 } 7085 _ = o3.Args[1] 7086 o4 := o3.Args[0] 7087 if o4.Op != OpARM64ORshiftLL { 7088 break 7089 } 7090 if o4.AuxInt != 40 { 7091 break 7092 } 7093 _ = o4.Args[1] 7094 o5 := o4.Args[0] 7095 if o5.Op != OpARM64ORshiftLL { 7096 break 7097 } 7098 if o5.AuxInt != 48 { 7099 break 7100 } 7101 _ = o5.Args[1] 7102 s0 := o5.Args[0] 7103 if s0.Op != OpARM64SLLconst { 7104 break 7105 } 7106 if s0.AuxInt != 56 { 7107 break 7108 } 7109 y0 := s0.Args[0] 7110 if y0.Op != OpARM64MOVDnop { 7111 break 7112 } 7113 x0 := y0.Args[0] 7114 if x0.Op != OpARM64MOVBUload { 7115 break 7116 } 7117 i7 := x0.AuxInt 7118 s := x0.Aux 7119 _ = x0.Args[1] 7120 p := x0.Args[0] 7121 mem := x0.Args[1] 7122 y1 := o5.Args[1] 7123 if y1.Op != OpARM64MOVDnop { 7124 break 7125 } 7126 x1 := y1.Args[0] 7127 if x1.Op != OpARM64MOVBUload { 7128 break 7129 } 7130 i6 := x1.AuxInt 7131 if x1.Aux != s { 7132 break 7133 } 7134 _ = x1.Args[1] 7135 if p != x1.Args[0] { 7136 break 7137 } 7138 if mem != x1.Args[1] { 7139 break 7140 } 7141 y2 := o4.Args[1] 7142 if y2.Op != OpARM64MOVDnop { 7143 break 7144 } 7145 x2 := y2.Args[0] 7146 if x2.Op != OpARM64MOVBUload { 7147 break 7148 } 7149 i5 := x2.AuxInt 7150 if x2.Aux != s { 7151 break 7152 } 7153 _ = x2.Args[1] 7154 if p != x2.Args[0] { 7155 break 7156 } 7157 if mem != x2.Args[1] { 7158 break 7159 } 7160 y3 := o3.Args[1] 7161 if y3.Op != OpARM64MOVDnop { 7162 break 7163 } 7164 x3 := y3.Args[0] 7165 if x3.Op != OpARM64MOVBUload { 7166 break 7167 } 7168 i4 := x3.AuxInt 7169 if x3.Aux != s { 7170 break 7171 } 7172 _ = x3.Args[1] 7173 if p != x3.Args[0] { 7174 break 7175 } 7176 if mem != x3.Args[1] { 7177 break 7178 } 7179 y4 := o2.Args[1] 7180 if y4.Op != OpARM64MOVDnop { 7181 break 7182 } 7183 x4 := y4.Args[0] 7184 if x4.Op != 
OpARM64MOVBUload { 7185 break 7186 } 7187 i3 := x4.AuxInt 7188 if x4.Aux != s { 7189 break 7190 } 7191 _ = x4.Args[1] 7192 if p != x4.Args[0] { 7193 break 7194 } 7195 if mem != x4.Args[1] { 7196 break 7197 } 7198 y5 := o1.Args[1] 7199 if y5.Op != OpARM64MOVDnop { 7200 break 7201 } 7202 x5 := y5.Args[0] 7203 if x5.Op != OpARM64MOVBUload { 7204 break 7205 } 7206 i2 := x5.AuxInt 7207 if x5.Aux != s { 7208 break 7209 } 7210 _ = x5.Args[1] 7211 if p != x5.Args[0] { 7212 break 7213 } 7214 if mem != x5.Args[1] { 7215 break 7216 } 7217 y6 := o0.Args[1] 7218 if y6.Op != OpARM64MOVDnop { 7219 break 7220 } 7221 x6 := y6.Args[0] 7222 if x6.Op != OpARM64MOVBUload { 7223 break 7224 } 7225 i1 := x6.AuxInt 7226 if x6.Aux != s { 7227 break 7228 } 7229 _ = x6.Args[1] 7230 if p != x6.Args[0] { 7231 break 7232 } 7233 if mem != x6.Args[1] { 7234 break 7235 } 7236 y7 := v.Args[1] 7237 if y7.Op != OpARM64MOVDnop { 7238 break 7239 } 7240 x7 := y7.Args[0] 7241 if x7.Op != OpARM64MOVBUload { 7242 break 7243 } 7244 i0 := x7.AuxInt 7245 if x7.Aux != s { 7246 break 7247 } 7248 _ = x7.Args[1] 7249 if p != x7.Args[0] { 7250 break 7251 } 7252 if mem != x7.Args[1] { 7253 break 7254 } 7255 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 
&& clobber(o4) && clobber(o5) && clobber(s0)) { 7256 break 7257 } 7258 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7259 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7260 v.reset(OpCopy) 7261 v.AddArg(v0) 7262 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7263 v1.Aux = s 7264 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7265 v2.AuxInt = i0 7266 v2.AddArg(p) 7267 v1.AddArg(v2) 7268 v1.AddArg(mem) 7269 v0.AddArg(v1) 7270 return true 7271 } 7272 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 7273 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7274 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7275 for { 7276 t := v.Type 7277 _ = v.Args[1] 7278 y7 := v.Args[0] 
7279 if y7.Op != OpARM64MOVDnop { 7280 break 7281 } 7282 x7 := y7.Args[0] 7283 if x7.Op != OpARM64MOVBUload { 7284 break 7285 } 7286 i0 := x7.AuxInt 7287 s := x7.Aux 7288 _ = x7.Args[1] 7289 p := x7.Args[0] 7290 mem := x7.Args[1] 7291 o0 := v.Args[1] 7292 if o0.Op != OpARM64ORshiftLL { 7293 break 7294 } 7295 if o0.AuxInt != 8 { 7296 break 7297 } 7298 _ = o0.Args[1] 7299 o1 := o0.Args[0] 7300 if o1.Op != OpARM64ORshiftLL { 7301 break 7302 } 7303 if o1.AuxInt != 16 { 7304 break 7305 } 7306 _ = o1.Args[1] 7307 o2 := o1.Args[0] 7308 if o2.Op != OpARM64ORshiftLL { 7309 break 7310 } 7311 if o2.AuxInt != 24 { 7312 break 7313 } 7314 _ = o2.Args[1] 7315 o3 := o2.Args[0] 7316 if o3.Op != OpARM64ORshiftLL { 7317 break 7318 } 7319 if o3.AuxInt != 32 { 7320 break 7321 } 7322 _ = o3.Args[1] 7323 o4 := o3.Args[0] 7324 if o4.Op != OpARM64ORshiftLL { 7325 break 7326 } 7327 if o4.AuxInt != 40 { 7328 break 7329 } 7330 _ = o4.Args[1] 7331 o5 := o4.Args[0] 7332 if o5.Op != OpARM64ORshiftLL { 7333 break 7334 } 7335 if o5.AuxInt != 48 { 7336 break 7337 } 7338 _ = o5.Args[1] 7339 s0 := o5.Args[0] 7340 if s0.Op != OpARM64SLLconst { 7341 break 7342 } 7343 if s0.AuxInt != 56 { 7344 break 7345 } 7346 y0 := s0.Args[0] 7347 if y0.Op != OpARM64MOVDnop { 7348 break 7349 } 7350 x0 := y0.Args[0] 7351 if x0.Op != OpARM64MOVBUload { 7352 break 7353 } 7354 i7 := x0.AuxInt 7355 if x0.Aux != s { 7356 break 7357 } 7358 _ = x0.Args[1] 7359 if p != x0.Args[0] { 7360 break 7361 } 7362 if mem != x0.Args[1] { 7363 break 7364 } 7365 y1 := o5.Args[1] 7366 if y1.Op != OpARM64MOVDnop { 7367 break 7368 } 7369 x1 := y1.Args[0] 7370 if x1.Op != OpARM64MOVBUload { 7371 break 7372 } 7373 i6 := x1.AuxInt 7374 if x1.Aux != s { 7375 break 7376 } 7377 _ = x1.Args[1] 7378 if p != x1.Args[0] { 7379 break 7380 } 7381 if mem != x1.Args[1] { 7382 break 7383 } 7384 y2 := o4.Args[1] 7385 if y2.Op != OpARM64MOVDnop { 7386 break 7387 } 7388 x2 := y2.Args[0] 7389 if x2.Op != OpARM64MOVBUload { 7390 break 7391 } 7392 i5 := x2.AuxInt 
7393 if x2.Aux != s { 7394 break 7395 } 7396 _ = x2.Args[1] 7397 if p != x2.Args[0] { 7398 break 7399 } 7400 if mem != x2.Args[1] { 7401 break 7402 } 7403 y3 := o3.Args[1] 7404 if y3.Op != OpARM64MOVDnop { 7405 break 7406 } 7407 x3 := y3.Args[0] 7408 if x3.Op != OpARM64MOVBUload { 7409 break 7410 } 7411 i4 := x3.AuxInt 7412 if x3.Aux != s { 7413 break 7414 } 7415 _ = x3.Args[1] 7416 if p != x3.Args[0] { 7417 break 7418 } 7419 if mem != x3.Args[1] { 7420 break 7421 } 7422 y4 := o2.Args[1] 7423 if y4.Op != OpARM64MOVDnop { 7424 break 7425 } 7426 x4 := y4.Args[0] 7427 if x4.Op != OpARM64MOVBUload { 7428 break 7429 } 7430 i3 := x4.AuxInt 7431 if x4.Aux != s { 7432 break 7433 } 7434 _ = x4.Args[1] 7435 if p != x4.Args[0] { 7436 break 7437 } 7438 if mem != x4.Args[1] { 7439 break 7440 } 7441 y5 := o1.Args[1] 7442 if y5.Op != OpARM64MOVDnop { 7443 break 7444 } 7445 x5 := y5.Args[0] 7446 if x5.Op != OpARM64MOVBUload { 7447 break 7448 } 7449 i2 := x5.AuxInt 7450 if x5.Aux != s { 7451 break 7452 } 7453 _ = x5.Args[1] 7454 if p != x5.Args[0] { 7455 break 7456 } 7457 if mem != x5.Args[1] { 7458 break 7459 } 7460 y6 := o0.Args[1] 7461 if y6.Op != OpARM64MOVDnop { 7462 break 7463 } 7464 x6 := y6.Args[0] 7465 if x6.Op != OpARM64MOVBUload { 7466 break 7467 } 7468 i1 := x6.AuxInt 7469 if x6.Aux != s { 7470 break 7471 } 7472 _ = x6.Args[1] 7473 if p != x6.Args[0] { 7474 break 7475 } 7476 if mem != x6.Args[1] { 7477 break 7478 } 7479 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && 
clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7480 break 7481 } 7482 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7483 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7484 v.reset(OpCopy) 7485 v.AddArg(v0) 7486 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7487 v1.Aux = s 7488 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7489 v2.AuxInt = i0 7490 v2.AddArg(p) 7491 v1.AddArg(v2) 7492 v1.AddArg(mem) 7493 v0.AddArg(v1) 7494 return true 7495 } 7496 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 7497 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7498 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7499 for { 7500 t := v.Type 7501 _ = v.Args[1] 7502 o0 := v.Args[0] 7503 if o0.Op != OpARM64ORshiftLL { 7504 break 7505 } 7506 if o0.AuxInt != 8 { 7507 break 7508 } 7509 _ = o0.Args[1] 7510 o1 := o0.Args[0] 7511 if o1.Op != OpARM64ORshiftLL { 7512 break 7513 } 7514 if o1.AuxInt != 16 { 7515 break 7516 } 7517 _ = o1.Args[1] 7518 s0 := o1.Args[0] 7519 if s0.Op != OpARM64SLLconst { 7520 break 7521 } 7522 if s0.AuxInt != 24 { 7523 break 7524 } 7525 y0 := s0.Args[0] 7526 if y0.Op != OpARM64MOVDnop { 7527 
break 7528 } 7529 x0 := y0.Args[0] 7530 if x0.Op != OpARM64MOVBUload { 7531 break 7532 } 7533 i0 := x0.AuxInt 7534 s := x0.Aux 7535 _ = x0.Args[1] 7536 p := x0.Args[0] 7537 mem := x0.Args[1] 7538 y1 := o1.Args[1] 7539 if y1.Op != OpARM64MOVDnop { 7540 break 7541 } 7542 x1 := y1.Args[0] 7543 if x1.Op != OpARM64MOVBUload { 7544 break 7545 } 7546 i1 := x1.AuxInt 7547 if x1.Aux != s { 7548 break 7549 } 7550 _ = x1.Args[1] 7551 if p != x1.Args[0] { 7552 break 7553 } 7554 if mem != x1.Args[1] { 7555 break 7556 } 7557 y2 := o0.Args[1] 7558 if y2.Op != OpARM64MOVDnop { 7559 break 7560 } 7561 x2 := y2.Args[0] 7562 if x2.Op != OpARM64MOVBUload { 7563 break 7564 } 7565 i2 := x2.AuxInt 7566 if x2.Aux != s { 7567 break 7568 } 7569 _ = x2.Args[1] 7570 if p != x2.Args[0] { 7571 break 7572 } 7573 if mem != x2.Args[1] { 7574 break 7575 } 7576 y3 := v.Args[1] 7577 if y3.Op != OpARM64MOVDnop { 7578 break 7579 } 7580 x3 := y3.Args[0] 7581 if x3.Op != OpARM64MOVBUload { 7582 break 7583 } 7584 i3 := x3.AuxInt 7585 if x3.Aux != s { 7586 break 7587 } 7588 _ = x3.Args[1] 7589 if p != x3.Args[0] { 7590 break 7591 } 7592 if mem != x3.Args[1] { 7593 break 7594 } 7595 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7596 break 7597 } 7598 b = mergePoint(b, x0, x1, x2, x3) 7599 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7600 v.reset(OpCopy) 7601 v.AddArg(v0) 7602 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7603 v1.Aux = s 7604 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7605 v2.AuxInt = i0 7606 v2.AddArg(p) 7607 v1.AddArg(v2) 7608 v1.AddArg(mem) 7609 v0.AddArg(v1) 7610 return true 7611 } 7612 // match: (OR <t> y3:(MOVDnop 
x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 7613 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 7614 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7615 for { 7616 t := v.Type 7617 _ = v.Args[1] 7618 y3 := v.Args[0] 7619 if y3.Op != OpARM64MOVDnop { 7620 break 7621 } 7622 x3 := y3.Args[0] 7623 if x3.Op != OpARM64MOVBUload { 7624 break 7625 } 7626 i3 := x3.AuxInt 7627 s := x3.Aux 7628 _ = x3.Args[1] 7629 p := x3.Args[0] 7630 mem := x3.Args[1] 7631 o0 := v.Args[1] 7632 if o0.Op != OpARM64ORshiftLL { 7633 break 7634 } 7635 if o0.AuxInt != 8 { 7636 break 7637 } 7638 _ = o0.Args[1] 7639 o1 := o0.Args[0] 7640 if o1.Op != OpARM64ORshiftLL { 7641 break 7642 } 7643 if o1.AuxInt != 16 { 7644 break 7645 } 7646 _ = o1.Args[1] 7647 s0 := o1.Args[0] 7648 if s0.Op != OpARM64SLLconst { 7649 break 7650 } 7651 if s0.AuxInt != 24 { 7652 break 7653 } 7654 y0 := s0.Args[0] 7655 if y0.Op != OpARM64MOVDnop { 7656 break 7657 } 7658 x0 := y0.Args[0] 7659 if x0.Op != OpARM64MOVBUload { 7660 break 7661 } 7662 i0 := x0.AuxInt 7663 if x0.Aux != s { 7664 break 7665 } 7666 _ = x0.Args[1] 7667 if p != x0.Args[0] { 7668 break 7669 } 7670 if mem != x0.Args[1] { 7671 break 7672 } 7673 y1 := o1.Args[1] 7674 if y1.Op != OpARM64MOVDnop { 7675 break 7676 } 7677 x1 := y1.Args[0] 7678 if x1.Op != OpARM64MOVBUload { 7679 break 7680 } 7681 i1 := x1.AuxInt 7682 if x1.Aux != s { 7683 break 7684 } 7685 _ = x1.Args[1] 
7686 if p != x1.Args[0] { 7687 break 7688 } 7689 if mem != x1.Args[1] { 7690 break 7691 } 7692 y2 := o0.Args[1] 7693 if y2.Op != OpARM64MOVDnop { 7694 break 7695 } 7696 x2 := y2.Args[0] 7697 if x2.Op != OpARM64MOVBUload { 7698 break 7699 } 7700 i2 := x2.AuxInt 7701 if x2.Aux != s { 7702 break 7703 } 7704 _ = x2.Args[1] 7705 if p != x2.Args[0] { 7706 break 7707 } 7708 if mem != x2.Args[1] { 7709 break 7710 } 7711 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 7712 break 7713 } 7714 b = mergePoint(b, x0, x1, x2, x3) 7715 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 7716 v.reset(OpCopy) 7717 v.AddArg(v0) 7718 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 7719 v1.Aux = s 7720 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7721 v2.AuxInt = i0 7722 v2.AddArg(p) 7723 v1.AddArg(v2) 7724 v1.AddArg(mem) 7725 v0.AddArg(v1) 7726 return true 7727 } 7728 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 7729 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && 
y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7730 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7731 for { 7732 t := v.Type 7733 _ = v.Args[1] 7734 o0 := v.Args[0] 7735 if o0.Op != OpARM64ORshiftLL { 7736 break 7737 } 7738 if o0.AuxInt != 8 { 7739 break 7740 } 7741 _ = o0.Args[1] 7742 o1 := o0.Args[0] 7743 if o1.Op != OpARM64ORshiftLL { 7744 break 7745 } 7746 if o1.AuxInt != 16 { 7747 break 7748 } 7749 _ = o1.Args[1] 7750 o2 := o1.Args[0] 7751 if o2.Op != OpARM64ORshiftLL { 7752 break 7753 } 7754 if o2.AuxInt != 24 { 7755 break 7756 } 7757 _ = o2.Args[1] 7758 o3 := o2.Args[0] 7759 if o3.Op != OpARM64ORshiftLL { 7760 break 7761 } 7762 if o3.AuxInt != 32 { 7763 break 7764 } 7765 _ = o3.Args[1] 7766 o4 := o3.Args[0] 7767 if o4.Op != OpARM64ORshiftLL { 7768 break 7769 } 7770 if o4.AuxInt != 40 { 7771 break 7772 } 7773 _ = o4.Args[1] 7774 o5 := o4.Args[0] 7775 if o5.Op != OpARM64ORshiftLL { 7776 break 7777 } 7778 if o5.AuxInt != 48 { 7779 break 7780 } 7781 _ = o5.Args[1] 7782 s0 := o5.Args[0] 7783 if s0.Op != OpARM64SLLconst { 7784 break 7785 } 7786 if s0.AuxInt != 56 { 7787 break 7788 } 7789 y0 := s0.Args[0] 7790 if y0.Op != OpARM64MOVDnop { 7791 break 7792 } 7793 x0 := y0.Args[0] 7794 if x0.Op != OpARM64MOVBUload { 7795 break 7796 } 7797 i0 := x0.AuxInt 7798 s := x0.Aux 7799 _ = x0.Args[1] 7800 p := x0.Args[0] 7801 mem := x0.Args[1] 7802 y1 := o5.Args[1] 7803 if 
y1.Op != OpARM64MOVDnop { 7804 break 7805 } 7806 x1 := y1.Args[0] 7807 if x1.Op != OpARM64MOVBUload { 7808 break 7809 } 7810 i1 := x1.AuxInt 7811 if x1.Aux != s { 7812 break 7813 } 7814 _ = x1.Args[1] 7815 if p != x1.Args[0] { 7816 break 7817 } 7818 if mem != x1.Args[1] { 7819 break 7820 } 7821 y2 := o4.Args[1] 7822 if y2.Op != OpARM64MOVDnop { 7823 break 7824 } 7825 x2 := y2.Args[0] 7826 if x2.Op != OpARM64MOVBUload { 7827 break 7828 } 7829 i2 := x2.AuxInt 7830 if x2.Aux != s { 7831 break 7832 } 7833 _ = x2.Args[1] 7834 if p != x2.Args[0] { 7835 break 7836 } 7837 if mem != x2.Args[1] { 7838 break 7839 } 7840 y3 := o3.Args[1] 7841 if y3.Op != OpARM64MOVDnop { 7842 break 7843 } 7844 x3 := y3.Args[0] 7845 if x3.Op != OpARM64MOVBUload { 7846 break 7847 } 7848 i3 := x3.AuxInt 7849 if x3.Aux != s { 7850 break 7851 } 7852 _ = x3.Args[1] 7853 if p != x3.Args[0] { 7854 break 7855 } 7856 if mem != x3.Args[1] { 7857 break 7858 } 7859 y4 := o2.Args[1] 7860 if y4.Op != OpARM64MOVDnop { 7861 break 7862 } 7863 x4 := y4.Args[0] 7864 if x4.Op != OpARM64MOVBUload { 7865 break 7866 } 7867 i4 := x4.AuxInt 7868 if x4.Aux != s { 7869 break 7870 } 7871 _ = x4.Args[1] 7872 if p != x4.Args[0] { 7873 break 7874 } 7875 if mem != x4.Args[1] { 7876 break 7877 } 7878 y5 := o1.Args[1] 7879 if y5.Op != OpARM64MOVDnop { 7880 break 7881 } 7882 x5 := y5.Args[0] 7883 if x5.Op != OpARM64MOVBUload { 7884 break 7885 } 7886 i5 := x5.AuxInt 7887 if x5.Aux != s { 7888 break 7889 } 7890 _ = x5.Args[1] 7891 if p != x5.Args[0] { 7892 break 7893 } 7894 if mem != x5.Args[1] { 7895 break 7896 } 7897 y6 := o0.Args[1] 7898 if y6.Op != OpARM64MOVDnop { 7899 break 7900 } 7901 x6 := y6.Args[0] 7902 if x6.Op != OpARM64MOVBUload { 7903 break 7904 } 7905 i6 := x6.AuxInt 7906 if x6.Aux != s { 7907 break 7908 } 7909 _ = x6.Args[1] 7910 if p != x6.Args[0] { 7911 break 7912 } 7913 if mem != x6.Args[1] { 7914 break 7915 } 7916 y7 := v.Args[1] 7917 if y7.Op != OpARM64MOVDnop { 7918 break 7919 } 7920 x7 := y7.Args[0] 7921 if 
x7.Op != OpARM64MOVBUload { 7922 break 7923 } 7924 i7 := x7.AuxInt 7925 if x7.Aux != s { 7926 break 7927 } 7928 _ = x7.Args[1] 7929 if p != x7.Args[0] { 7930 break 7931 } 7932 if mem != x7.Args[1] { 7933 break 7934 } 7935 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 7936 break 7937 } 7938 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 7939 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 7940 v.reset(OpCopy) 7941 v.AddArg(v0) 7942 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 7943 v1.Aux = s 7944 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 7945 v2.AuxInt = i0 7946 v2.AddArg(p) 7947 v1.AddArg(v2) 7948 v1.AddArg(mem) 7949 v0.AddArg(v1) 7950 return true 7951 } 7952 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 7953 // 
cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 7954 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 7955 for { 7956 t := v.Type 7957 _ = v.Args[1] 7958 y7 := v.Args[0] 7959 if y7.Op != OpARM64MOVDnop { 7960 break 7961 } 7962 x7 := y7.Args[0] 7963 if x7.Op != OpARM64MOVBUload { 7964 break 7965 } 7966 i7 := x7.AuxInt 7967 s := x7.Aux 7968 _ = x7.Args[1] 7969 p := x7.Args[0] 7970 mem := x7.Args[1] 7971 o0 := v.Args[1] 7972 if o0.Op != OpARM64ORshiftLL { 7973 break 7974 } 7975 if o0.AuxInt != 8 { 7976 break 7977 } 7978 _ = o0.Args[1] 7979 o1 := o0.Args[0] 7980 if o1.Op != OpARM64ORshiftLL { 7981 break 7982 } 7983 if o1.AuxInt != 16 { 7984 break 7985 } 7986 _ = o1.Args[1] 7987 o2 := o1.Args[0] 7988 if o2.Op != OpARM64ORshiftLL { 7989 break 7990 } 7991 if o2.AuxInt != 24 { 7992 break 7993 } 7994 _ = o2.Args[1] 7995 o3 := o2.Args[0] 7996 if o3.Op != OpARM64ORshiftLL { 7997 break 7998 } 7999 if o3.AuxInt != 32 { 8000 break 8001 } 8002 _ = o3.Args[1] 8003 o4 := o3.Args[0] 8004 if o4.Op != OpARM64ORshiftLL { 8005 break 8006 } 8007 if o4.AuxInt != 40 { 8008 break 8009 } 8010 _ = o4.Args[1] 8011 o5 := o4.Args[0] 8012 
if o5.Op != OpARM64ORshiftLL { 8013 break 8014 } 8015 if o5.AuxInt != 48 { 8016 break 8017 } 8018 _ = o5.Args[1] 8019 s0 := o5.Args[0] 8020 if s0.Op != OpARM64SLLconst { 8021 break 8022 } 8023 if s0.AuxInt != 56 { 8024 break 8025 } 8026 y0 := s0.Args[0] 8027 if y0.Op != OpARM64MOVDnop { 8028 break 8029 } 8030 x0 := y0.Args[0] 8031 if x0.Op != OpARM64MOVBUload { 8032 break 8033 } 8034 i0 := x0.AuxInt 8035 if x0.Aux != s { 8036 break 8037 } 8038 _ = x0.Args[1] 8039 if p != x0.Args[0] { 8040 break 8041 } 8042 if mem != x0.Args[1] { 8043 break 8044 } 8045 y1 := o5.Args[1] 8046 if y1.Op != OpARM64MOVDnop { 8047 break 8048 } 8049 x1 := y1.Args[0] 8050 if x1.Op != OpARM64MOVBUload { 8051 break 8052 } 8053 i1 := x1.AuxInt 8054 if x1.Aux != s { 8055 break 8056 } 8057 _ = x1.Args[1] 8058 if p != x1.Args[0] { 8059 break 8060 } 8061 if mem != x1.Args[1] { 8062 break 8063 } 8064 y2 := o4.Args[1] 8065 if y2.Op != OpARM64MOVDnop { 8066 break 8067 } 8068 x2 := y2.Args[0] 8069 if x2.Op != OpARM64MOVBUload { 8070 break 8071 } 8072 i2 := x2.AuxInt 8073 if x2.Aux != s { 8074 break 8075 } 8076 _ = x2.Args[1] 8077 if p != x2.Args[0] { 8078 break 8079 } 8080 if mem != x2.Args[1] { 8081 break 8082 } 8083 y3 := o3.Args[1] 8084 if y3.Op != OpARM64MOVDnop { 8085 break 8086 } 8087 x3 := y3.Args[0] 8088 if x3.Op != OpARM64MOVBUload { 8089 break 8090 } 8091 i3 := x3.AuxInt 8092 if x3.Aux != s { 8093 break 8094 } 8095 _ = x3.Args[1] 8096 if p != x3.Args[0] { 8097 break 8098 } 8099 if mem != x3.Args[1] { 8100 break 8101 } 8102 y4 := o2.Args[1] 8103 if y4.Op != OpARM64MOVDnop { 8104 break 8105 } 8106 x4 := y4.Args[0] 8107 if x4.Op != OpARM64MOVBUload { 8108 break 8109 } 8110 i4 := x4.AuxInt 8111 if x4.Aux != s { 8112 break 8113 } 8114 _ = x4.Args[1] 8115 if p != x4.Args[0] { 8116 break 8117 } 8118 if mem != x4.Args[1] { 8119 break 8120 } 8121 y5 := o1.Args[1] 8122 if y5.Op != OpARM64MOVDnop { 8123 break 8124 } 8125 x5 := y5.Args[0] 8126 if x5.Op != OpARM64MOVBUload { 8127 break 8128 } 8129 i5 := 
x5.AuxInt 8130 if x5.Aux != s { 8131 break 8132 } 8133 _ = x5.Args[1] 8134 if p != x5.Args[0] { 8135 break 8136 } 8137 if mem != x5.Args[1] { 8138 break 8139 } 8140 y6 := o0.Args[1] 8141 if y6.Op != OpARM64MOVDnop { 8142 break 8143 } 8144 x6 := y6.Args[0] 8145 if x6.Op != OpARM64MOVBUload { 8146 break 8147 } 8148 i6 := x6.AuxInt 8149 if x6.Aux != s { 8150 break 8151 } 8152 _ = x6.Args[1] 8153 if p != x6.Args[0] { 8154 break 8155 } 8156 if mem != x6.Args[1] { 8157 break 8158 } 8159 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 8160 break 8161 } 8162 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 8163 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 8164 v.reset(OpCopy) 8165 v.AddArg(v0) 8166 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8167 v1.Aux = s 8168 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8169 v2.AuxInt = i0 8170 v2.AddArg(p) 8171 v1.AddArg(v2) 8172 v1.AddArg(mem) 8173 v0.AddArg(v1) 8174 return true 8175 } 8176 return false 8177 } 8178 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool { 8179 // match: (ORconst [0] x) 8180 // cond: 8181 // result: x 8182 for { 8183 if v.AuxInt != 0 { 8184 break 8185 } 8186 x := v.Args[0] 8187 v.reset(OpCopy) 8188 
v.Type = x.Type 8189 v.AddArg(x) 8190 return true 8191 } 8192 // match: (ORconst [-1] _) 8193 // cond: 8194 // result: (MOVDconst [-1]) 8195 for { 8196 if v.AuxInt != -1 { 8197 break 8198 } 8199 v.reset(OpARM64MOVDconst) 8200 v.AuxInt = -1 8201 return true 8202 } 8203 // match: (ORconst [c] (MOVDconst [d])) 8204 // cond: 8205 // result: (MOVDconst [c|d]) 8206 for { 8207 c := v.AuxInt 8208 v_0 := v.Args[0] 8209 if v_0.Op != OpARM64MOVDconst { 8210 break 8211 } 8212 d := v_0.AuxInt 8213 v.reset(OpARM64MOVDconst) 8214 v.AuxInt = c | d 8215 return true 8216 } 8217 // match: (ORconst [c] (ORconst [d] x)) 8218 // cond: 8219 // result: (ORconst [c|d] x) 8220 for { 8221 c := v.AuxInt 8222 v_0 := v.Args[0] 8223 if v_0.Op != OpARM64ORconst { 8224 break 8225 } 8226 d := v_0.AuxInt 8227 x := v_0.Args[0] 8228 v.reset(OpARM64ORconst) 8229 v.AuxInt = c | d 8230 v.AddArg(x) 8231 return true 8232 } 8233 return false 8234 } 8235 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool { 8236 b := v.Block 8237 _ = b 8238 // match: (ORshiftLL (MOVDconst [c]) x [d]) 8239 // cond: 8240 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 8241 for { 8242 d := v.AuxInt 8243 _ = v.Args[1] 8244 v_0 := v.Args[0] 8245 if v_0.Op != OpARM64MOVDconst { 8246 break 8247 } 8248 c := v_0.AuxInt 8249 x := v.Args[1] 8250 v.reset(OpARM64ORconst) 8251 v.AuxInt = c 8252 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8253 v0.AuxInt = d 8254 v0.AddArg(x) 8255 v.AddArg(v0) 8256 return true 8257 } 8258 // match: (ORshiftLL x (MOVDconst [c]) [d]) 8259 // cond: 8260 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 8261 for { 8262 d := v.AuxInt 8263 _ = v.Args[1] 8264 x := v.Args[0] 8265 v_1 := v.Args[1] 8266 if v_1.Op != OpARM64MOVDconst { 8267 break 8268 } 8269 c := v_1.AuxInt 8270 v.reset(OpARM64ORconst) 8271 v.AuxInt = int64(uint64(c) << uint64(d)) 8272 v.AddArg(x) 8273 return true 8274 } 8275 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 8276 // cond: c==d 8277 // result: y 8278 for { 8279 d := 
v.AuxInt 8280 _ = v.Args[1] 8281 x := v.Args[0] 8282 y := v.Args[1] 8283 if y.Op != OpARM64SLLconst { 8284 break 8285 } 8286 c := y.AuxInt 8287 if x != y.Args[0] { 8288 break 8289 } 8290 if !(c == d) { 8291 break 8292 } 8293 v.reset(OpCopy) 8294 v.Type = y.Type 8295 v.AddArg(y) 8296 return true 8297 } 8298 // match: (ORshiftLL [c] (SRLconst x [64-c]) x) 8299 // cond: 8300 // result: (RORconst [64-c] x) 8301 for { 8302 c := v.AuxInt 8303 _ = v.Args[1] 8304 v_0 := v.Args[0] 8305 if v_0.Op != OpARM64SRLconst { 8306 break 8307 } 8308 if v_0.AuxInt != 64-c { 8309 break 8310 } 8311 x := v_0.Args[0] 8312 if x != v.Args[1] { 8313 break 8314 } 8315 v.reset(OpARM64RORconst) 8316 v.AuxInt = 64 - c 8317 v.AddArg(x) 8318 return true 8319 } 8320 // match: (ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 8321 // cond: c < 32 && t.Size() == 4 8322 // result: (RORWconst [32-c] x) 8323 for { 8324 t := v.Type 8325 c := v.AuxInt 8326 _ = v.Args[1] 8327 v_0 := v.Args[0] 8328 if v_0.Op != OpARM64SRLconst { 8329 break 8330 } 8331 if v_0.AuxInt != 32-c { 8332 break 8333 } 8334 v_0_0 := v_0.Args[0] 8335 if v_0_0.Op != OpARM64MOVWUreg { 8336 break 8337 } 8338 x := v_0_0.Args[0] 8339 if x != v.Args[1] { 8340 break 8341 } 8342 if !(c < 32 && t.Size() == 4) { 8343 break 8344 } 8345 v.reset(OpARM64RORWconst) 8346 v.AuxInt = 32 - c 8347 v.AddArg(x) 8348 return true 8349 } 8350 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) 8351 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 8352 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8353 for { 8354 t := v.Type 8355 if v.AuxInt != 8 { 8356 break 8357 } 8358 _ = v.Args[1] 8359 y0 := v.Args[0] 8360 if y0.Op != OpARM64MOVDnop { 8361 break 8362 } 8363 x0 := y0.Args[0] 8364 if x0.Op != OpARM64MOVBUload { 8365 break 8366 } 8367 i0 
:= x0.AuxInt 8368 s := x0.Aux 8369 _ = x0.Args[1] 8370 p := x0.Args[0] 8371 mem := x0.Args[1] 8372 y1 := v.Args[1] 8373 if y1.Op != OpARM64MOVDnop { 8374 break 8375 } 8376 x1 := y1.Args[0] 8377 if x1.Op != OpARM64MOVBUload { 8378 break 8379 } 8380 i1 := x1.AuxInt 8381 if x1.Aux != s { 8382 break 8383 } 8384 _ = x1.Args[1] 8385 if p != x1.Args[0] { 8386 break 8387 } 8388 if mem != x1.Args[1] { 8389 break 8390 } 8391 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 8392 break 8393 } 8394 b = mergePoint(b, x0, x1) 8395 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 8396 v.reset(OpCopy) 8397 v.AddArg(v0) 8398 v0.Aux = s 8399 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8400 v1.AuxInt = i0 8401 v1.AddArg(p) 8402 v0.AddArg(v1) 8403 v0.AddArg(mem) 8404 return true 8405 } 8406 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 8407 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 8408 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8409 for { 8410 t := v.Type 8411 if v.AuxInt != 24 { 8412 break 8413 } 8414 _ = v.Args[1] 8415 o0 := v.Args[0] 8416 if o0.Op != OpARM64ORshiftLL { 8417 break 8418 } 8419 if o0.AuxInt != 16 { 8420 break 8421 } 8422 _ = o0.Args[1] 8423 x0 := o0.Args[0] 8424 if x0.Op != OpARM64MOVHUload { 8425 break 8426 } 8427 i0 := x0.AuxInt 8428 s := x0.Aux 8429 _ = x0.Args[1] 8430 p := x0.Args[0] 8431 mem := x0.Args[1] 8432 y1 := o0.Args[1] 8433 if y1.Op != OpARM64MOVDnop { 8434 break 8435 } 8436 x1 := y1.Args[0] 8437 if x1.Op != OpARM64MOVBUload { 8438 break 8439 } 8440 i2 := x1.AuxInt 8441 
if x1.Aux != s { 8442 break 8443 } 8444 _ = x1.Args[1] 8445 if p != x1.Args[0] { 8446 break 8447 } 8448 if mem != x1.Args[1] { 8449 break 8450 } 8451 y2 := v.Args[1] 8452 if y2.Op != OpARM64MOVDnop { 8453 break 8454 } 8455 x2 := y2.Args[0] 8456 if x2.Op != OpARM64MOVBUload { 8457 break 8458 } 8459 i3 := x2.AuxInt 8460 if x2.Aux != s { 8461 break 8462 } 8463 _ = x2.Args[1] 8464 if p != x2.Args[0] { 8465 break 8466 } 8467 if mem != x2.Args[1] { 8468 break 8469 } 8470 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 8471 break 8472 } 8473 b = mergePoint(b, x0, x1, x2) 8474 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8475 v.reset(OpCopy) 8476 v.AddArg(v0) 8477 v0.Aux = s 8478 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8479 v1.AuxInt = i0 8480 v1.AddArg(p) 8481 v0.AddArg(v1) 8482 v0.AddArg(mem) 8483 return true 8484 } 8485 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 8486 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 8487 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 8488 for { 8489 t := v.Type 8490 if v.AuxInt != 56 { 8491 break 8492 } 8493 _ = v.Args[1] 8494 
o0 := v.Args[0] 8495 if o0.Op != OpARM64ORshiftLL { 8496 break 8497 } 8498 if o0.AuxInt != 48 { 8499 break 8500 } 8501 _ = o0.Args[1] 8502 o1 := o0.Args[0] 8503 if o1.Op != OpARM64ORshiftLL { 8504 break 8505 } 8506 if o1.AuxInt != 40 { 8507 break 8508 } 8509 _ = o1.Args[1] 8510 o2 := o1.Args[0] 8511 if o2.Op != OpARM64ORshiftLL { 8512 break 8513 } 8514 if o2.AuxInt != 32 { 8515 break 8516 } 8517 _ = o2.Args[1] 8518 x0 := o2.Args[0] 8519 if x0.Op != OpARM64MOVWUload { 8520 break 8521 } 8522 i0 := x0.AuxInt 8523 s := x0.Aux 8524 _ = x0.Args[1] 8525 p := x0.Args[0] 8526 mem := x0.Args[1] 8527 y1 := o2.Args[1] 8528 if y1.Op != OpARM64MOVDnop { 8529 break 8530 } 8531 x1 := y1.Args[0] 8532 if x1.Op != OpARM64MOVBUload { 8533 break 8534 } 8535 i4 := x1.AuxInt 8536 if x1.Aux != s { 8537 break 8538 } 8539 _ = x1.Args[1] 8540 if p != x1.Args[0] { 8541 break 8542 } 8543 if mem != x1.Args[1] { 8544 break 8545 } 8546 y2 := o1.Args[1] 8547 if y2.Op != OpARM64MOVDnop { 8548 break 8549 } 8550 x2 := y2.Args[0] 8551 if x2.Op != OpARM64MOVBUload { 8552 break 8553 } 8554 i5 := x2.AuxInt 8555 if x2.Aux != s { 8556 break 8557 } 8558 _ = x2.Args[1] 8559 if p != x2.Args[0] { 8560 break 8561 } 8562 if mem != x2.Args[1] { 8563 break 8564 } 8565 y3 := o0.Args[1] 8566 if y3.Op != OpARM64MOVDnop { 8567 break 8568 } 8569 x3 := y3.Args[0] 8570 if x3.Op != OpARM64MOVBUload { 8571 break 8572 } 8573 i6 := x3.AuxInt 8574 if x3.Aux != s { 8575 break 8576 } 8577 _ = x3.Args[1] 8578 if p != x3.Args[0] { 8579 break 8580 } 8581 if mem != x3.Args[1] { 8582 break 8583 } 8584 y4 := v.Args[1] 8585 if y4.Op != OpARM64MOVDnop { 8586 break 8587 } 8588 x4 := y4.Args[0] 8589 if x4.Op != OpARM64MOVBUload { 8590 break 8591 } 8592 i7 := x4.AuxInt 8593 if x4.Aux != s { 8594 break 8595 } 8596 _ = x4.Args[1] 8597 if p != x4.Args[0] { 8598 break 8599 } 8600 if mem != x4.Args[1] { 8601 break 8602 } 8603 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 
1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 8604 break 8605 } 8606 b = mergePoint(b, x0, x1, x2, x3, x4) 8607 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8608 v.reset(OpCopy) 8609 v.AddArg(v0) 8610 v0.Aux = s 8611 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8612 v1.AuxInt = i0 8613 v1.AddArg(p) 8614 v0.AddArg(v1) 8615 v0.AddArg(mem) 8616 return true 8617 } 8618 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 8619 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 8620 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 8621 for { 8622 t := v.Type 8623 if v.AuxInt != 8 { 8624 break 8625 } 8626 _ = v.Args[1] 8627 y0 := v.Args[0] 8628 if y0.Op != OpARM64MOVDnop { 8629 break 8630 } 8631 x0 := y0.Args[0] 8632 if x0.Op != OpARM64MOVBUload { 8633 break 8634 } 8635 i1 := x0.AuxInt 8636 s := x0.Aux 8637 _ = x0.Args[1] 8638 p := x0.Args[0] 8639 mem := x0.Args[1] 8640 y1 := v.Args[1] 8641 if y1.Op != OpARM64MOVDnop { 8642 break 8643 } 8644 x1 := y1.Args[0] 8645 if x1.Op != OpARM64MOVBUload { 8646 break 8647 } 8648 i0 := x1.AuxInt 8649 if x1.Aux != s { 8650 break 8651 } 8652 _ = x1.Args[1] 8653 if p != x1.Args[0] { 8654 break 8655 } 8656 if mem != x1.Args[1] { 8657 break 8658 } 8659 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 8660 break 8661 } 8662 b = mergePoint(b, x0, x1) 8663 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 8664 
v.reset(OpCopy) 8665 v.AddArg(v0) 8666 v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 8667 v1.AuxInt = i0 8668 v1.Aux = s 8669 v1.AddArg(p) 8670 v1.AddArg(mem) 8671 v0.AddArg(v1) 8672 return true 8673 } 8674 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 8675 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 8676 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 8677 for { 8678 t := v.Type 8679 if v.AuxInt != 24 { 8680 break 8681 } 8682 _ = v.Args[1] 8683 o0 := v.Args[0] 8684 if o0.Op != OpARM64ORshiftLL { 8685 break 8686 } 8687 if o0.AuxInt != 16 { 8688 break 8689 } 8690 _ = o0.Args[1] 8691 y0 := o0.Args[0] 8692 if y0.Op != OpARM64REV16W { 8693 break 8694 } 8695 x0 := y0.Args[0] 8696 if x0.Op != OpARM64MOVHUload { 8697 break 8698 } 8699 i2 := x0.AuxInt 8700 s := x0.Aux 8701 _ = x0.Args[1] 8702 p := x0.Args[0] 8703 mem := x0.Args[1] 8704 y1 := o0.Args[1] 8705 if y1.Op != OpARM64MOVDnop { 8706 break 8707 } 8708 x1 := y1.Args[0] 8709 if x1.Op != OpARM64MOVBUload { 8710 break 8711 } 8712 i1 := x1.AuxInt 8713 if x1.Aux != s { 8714 break 8715 } 8716 _ = x1.Args[1] 8717 if p != x1.Args[0] { 8718 break 8719 } 8720 if mem != x1.Args[1] { 8721 break 8722 } 8723 y2 := v.Args[1] 8724 if y2.Op != OpARM64MOVDnop { 8725 break 8726 } 8727 x2 := y2.Args[0] 8728 if x2.Op != OpARM64MOVBUload { 8729 break 8730 } 8731 i0 := x2.AuxInt 8732 if x2.Aux != s { 8733 break 8734 } 8735 _ = x2.Args[1] 8736 if p != x2.Args[0] { 8737 break 8738 } 8739 if mem != x2.Args[1] { 8740 break 8741 } 8742 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && 
y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 8743 break 8744 } 8745 b = mergePoint(b, x0, x1, x2) 8746 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 8747 v.reset(OpCopy) 8748 v.AddArg(v0) 8749 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 8750 v1.Aux = s 8751 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8752 v2.AuxInt = i0 8753 v2.AddArg(p) 8754 v1.AddArg(v2) 8755 v1.AddArg(mem) 8756 v0.AddArg(v1) 8757 return true 8758 } 8759 return false 8760 } 8761 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool { 8762 b := v.Block 8763 _ = b 8764 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 8765 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 8766 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 8767 for { 8768 t := v.Type 8769 if v.AuxInt != 56 { 8770 break 8771 } 8772 _ = v.Args[1] 8773 o0 := v.Args[0] 8774 if o0.Op != OpARM64ORshiftLL { 8775 break 8776 } 8777 if o0.AuxInt != 48 { 8778 break 8779 } 8780 _ = o0.Args[1] 8781 o1 := o0.Args[0] 8782 if o1.Op != OpARM64ORshiftLL { 8783 break 8784 } 8785 if o1.AuxInt != 40 { 8786 break 8787 } 8788 _ = o1.Args[1] 8789 o2 := o1.Args[0] 8790 
if o2.Op != OpARM64ORshiftLL { 8791 break 8792 } 8793 if o2.AuxInt != 32 { 8794 break 8795 } 8796 _ = o2.Args[1] 8797 y0 := o2.Args[0] 8798 if y0.Op != OpARM64REVW { 8799 break 8800 } 8801 x0 := y0.Args[0] 8802 if x0.Op != OpARM64MOVWUload { 8803 break 8804 } 8805 i4 := x0.AuxInt 8806 s := x0.Aux 8807 _ = x0.Args[1] 8808 p := x0.Args[0] 8809 mem := x0.Args[1] 8810 y1 := o2.Args[1] 8811 if y1.Op != OpARM64MOVDnop { 8812 break 8813 } 8814 x1 := y1.Args[0] 8815 if x1.Op != OpARM64MOVBUload { 8816 break 8817 } 8818 i3 := x1.AuxInt 8819 if x1.Aux != s { 8820 break 8821 } 8822 _ = x1.Args[1] 8823 if p != x1.Args[0] { 8824 break 8825 } 8826 if mem != x1.Args[1] { 8827 break 8828 } 8829 y2 := o1.Args[1] 8830 if y2.Op != OpARM64MOVDnop { 8831 break 8832 } 8833 x2 := y2.Args[0] 8834 if x2.Op != OpARM64MOVBUload { 8835 break 8836 } 8837 i2 := x2.AuxInt 8838 if x2.Aux != s { 8839 break 8840 } 8841 _ = x2.Args[1] 8842 if p != x2.Args[0] { 8843 break 8844 } 8845 if mem != x2.Args[1] { 8846 break 8847 } 8848 y3 := o0.Args[1] 8849 if y3.Op != OpARM64MOVDnop { 8850 break 8851 } 8852 x3 := y3.Args[0] 8853 if x3.Op != OpARM64MOVBUload { 8854 break 8855 } 8856 i1 := x3.AuxInt 8857 if x3.Aux != s { 8858 break 8859 } 8860 _ = x3.Args[1] 8861 if p != x3.Args[0] { 8862 break 8863 } 8864 if mem != x3.Args[1] { 8865 break 8866 } 8867 y4 := v.Args[1] 8868 if y4.Op != OpARM64MOVDnop { 8869 break 8870 } 8871 x4 := y4.Args[0] 8872 if x4.Op != OpARM64MOVBUload { 8873 break 8874 } 8875 i0 := x4.AuxInt 8876 if x4.Aux != s { 8877 break 8878 } 8879 _ = x4.Args[1] 8880 if p != x4.Args[0] { 8881 break 8882 } 8883 if mem != x4.Args[1] { 8884 break 8885 } 8886 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && 
clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 8887 break 8888 } 8889 b = mergePoint(b, x0, x1, x2, x3, x4) 8890 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 8891 v.reset(OpCopy) 8892 v.AddArg(v0) 8893 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 8894 v1.Aux = s 8895 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 8896 v2.AuxInt = i0 8897 v2.AddArg(p) 8898 v1.AddArg(v2) 8899 v1.AddArg(mem) 8900 v0.AddArg(v1) 8901 return true 8902 } 8903 return false 8904 } 8905 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 8906 b := v.Block 8907 _ = b 8908 // match: (ORshiftRA (MOVDconst [c]) x [d]) 8909 // cond: 8910 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 8911 for { 8912 d := v.AuxInt 8913 _ = v.Args[1] 8914 v_0 := v.Args[0] 8915 if v_0.Op != OpARM64MOVDconst { 8916 break 8917 } 8918 c := v_0.AuxInt 8919 x := v.Args[1] 8920 v.reset(OpARM64ORconst) 8921 v.AuxInt = c 8922 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 8923 v0.AuxInt = d 8924 v0.AddArg(x) 8925 v.AddArg(v0) 8926 return true 8927 } 8928 // match: (ORshiftRA x (MOVDconst [c]) [d]) 8929 // cond: 8930 // result: (ORconst x [int64(int64(c)>>uint64(d))]) 8931 for { 8932 d := v.AuxInt 8933 _ = v.Args[1] 8934 x := v.Args[0] 8935 v_1 := v.Args[1] 8936 if v_1.Op != OpARM64MOVDconst { 8937 break 8938 } 8939 c := v_1.AuxInt 8940 v.reset(OpARM64ORconst) 8941 v.AuxInt = int64(int64(c) >> uint64(d)) 8942 v.AddArg(x) 8943 return true 8944 } 8945 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 8946 // cond: c==d 8947 // result: y 8948 for { 8949 d := v.AuxInt 8950 _ = v.Args[1] 8951 x := v.Args[0] 8952 y := v.Args[1] 8953 if y.Op != OpARM64SRAconst { 8954 break 8955 } 8956 c := y.AuxInt 8957 if x != y.Args[0] { 8958 break 8959 } 8960 if !(c == d) { 8961 break 8962 } 8963 v.reset(OpCopy) 8964 v.Type = y.Type 8965 v.AddArg(y) 8966 return true 8967 } 8968 return false 8969 } 8970 func 
rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 8971 b := v.Block 8972 _ = b 8973 // match: (ORshiftRL (MOVDconst [c]) x [d]) 8974 // cond: 8975 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 8976 for { 8977 d := v.AuxInt 8978 _ = v.Args[1] 8979 v_0 := v.Args[0] 8980 if v_0.Op != OpARM64MOVDconst { 8981 break 8982 } 8983 c := v_0.AuxInt 8984 x := v.Args[1] 8985 v.reset(OpARM64ORconst) 8986 v.AuxInt = c 8987 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 8988 v0.AuxInt = d 8989 v0.AddArg(x) 8990 v.AddArg(v0) 8991 return true 8992 } 8993 // match: (ORshiftRL x (MOVDconst [c]) [d]) 8994 // cond: 8995 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 8996 for { 8997 d := v.AuxInt 8998 _ = v.Args[1] 8999 x := v.Args[0] 9000 v_1 := v.Args[1] 9001 if v_1.Op != OpARM64MOVDconst { 9002 break 9003 } 9004 c := v_1.AuxInt 9005 v.reset(OpARM64ORconst) 9006 v.AuxInt = int64(uint64(c) >> uint64(d)) 9007 v.AddArg(x) 9008 return true 9009 } 9010 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 9011 // cond: c==d 9012 // result: y 9013 for { 9014 d := v.AuxInt 9015 _ = v.Args[1] 9016 x := v.Args[0] 9017 y := v.Args[1] 9018 if y.Op != OpARM64SRLconst { 9019 break 9020 } 9021 c := y.AuxInt 9022 if x != y.Args[0] { 9023 break 9024 } 9025 if !(c == d) { 9026 break 9027 } 9028 v.reset(OpCopy) 9029 v.Type = y.Type 9030 v.AddArg(y) 9031 return true 9032 } 9033 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 9034 // cond: 9035 // result: (RORconst [ c] x) 9036 for { 9037 c := v.AuxInt 9038 _ = v.Args[1] 9039 v_0 := v.Args[0] 9040 if v_0.Op != OpARM64SLLconst { 9041 break 9042 } 9043 if v_0.AuxInt != 64-c { 9044 break 9045 } 9046 x := v_0.Args[0] 9047 if x != v.Args[1] { 9048 break 9049 } 9050 v.reset(OpARM64RORconst) 9051 v.AuxInt = c 9052 v.AddArg(x) 9053 return true 9054 } 9055 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 9056 // cond: c < 32 && t.Size() == 4 9057 // result: (RORWconst [ c] x) 9058 for { 9059 t := v.Type 9060 c := v.AuxInt 9061 _ = 
v.Args[1] 9062 v_0 := v.Args[0] 9063 if v_0.Op != OpARM64SLLconst { 9064 break 9065 } 9066 if v_0.AuxInt != 32-c { 9067 break 9068 } 9069 x := v_0.Args[0] 9070 v_1 := v.Args[1] 9071 if v_1.Op != OpARM64MOVWUreg { 9072 break 9073 } 9074 if x != v_1.Args[0] { 9075 break 9076 } 9077 if !(c < 32 && t.Size() == 4) { 9078 break 9079 } 9080 v.reset(OpARM64RORWconst) 9081 v.AuxInt = c 9082 v.AddArg(x) 9083 return true 9084 } 9085 return false 9086 } 9087 func rewriteValueARM64_OpARM64SLL_0(v *Value) bool { 9088 // match: (SLL x (MOVDconst [c])) 9089 // cond: 9090 // result: (SLLconst x [c&63]) 9091 for { 9092 _ = v.Args[1] 9093 x := v.Args[0] 9094 v_1 := v.Args[1] 9095 if v_1.Op != OpARM64MOVDconst { 9096 break 9097 } 9098 c := v_1.AuxInt 9099 v.reset(OpARM64SLLconst) 9100 v.AuxInt = c & 63 9101 v.AddArg(x) 9102 return true 9103 } 9104 return false 9105 } 9106 func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool { 9107 // match: (SLLconst [c] (MOVDconst [d])) 9108 // cond: 9109 // result: (MOVDconst [int64(d)<<uint64(c)]) 9110 for { 9111 c := v.AuxInt 9112 v_0 := v.Args[0] 9113 if v_0.Op != OpARM64MOVDconst { 9114 break 9115 } 9116 d := v_0.AuxInt 9117 v.reset(OpARM64MOVDconst) 9118 v.AuxInt = int64(d) << uint64(c) 9119 return true 9120 } 9121 // match: (SLLconst [c] (SRLconst [c] x)) 9122 // cond: 0 < c && c < 64 9123 // result: (ANDconst [^(1<<uint(c)-1)] x) 9124 for { 9125 c := v.AuxInt 9126 v_0 := v.Args[0] 9127 if v_0.Op != OpARM64SRLconst { 9128 break 9129 } 9130 if v_0.AuxInt != c { 9131 break 9132 } 9133 x := v_0.Args[0] 9134 if !(0 < c && c < 64) { 9135 break 9136 } 9137 v.reset(OpARM64ANDconst) 9138 v.AuxInt = ^(1<<uint(c) - 1) 9139 v.AddArg(x) 9140 return true 9141 } 9142 return false 9143 } 9144 func rewriteValueARM64_OpARM64SRA_0(v *Value) bool { 9145 // match: (SRA x (MOVDconst [c])) 9146 // cond: 9147 // result: (SRAconst x [c&63]) 9148 for { 9149 _ = v.Args[1] 9150 x := v.Args[0] 9151 v_1 := v.Args[1] 9152 if v_1.Op != OpARM64MOVDconst { 9153 break 9154 
} 9155 c := v_1.AuxInt 9156 v.reset(OpARM64SRAconst) 9157 v.AuxInt = c & 63 9158 v.AddArg(x) 9159 return true 9160 } 9161 return false 9162 } 9163 func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool { 9164 // match: (SRAconst [c] (MOVDconst [d])) 9165 // cond: 9166 // result: (MOVDconst [int64(d)>>uint64(c)]) 9167 for { 9168 c := v.AuxInt 9169 v_0 := v.Args[0] 9170 if v_0.Op != OpARM64MOVDconst { 9171 break 9172 } 9173 d := v_0.AuxInt 9174 v.reset(OpARM64MOVDconst) 9175 v.AuxInt = int64(d) >> uint64(c) 9176 return true 9177 } 9178 return false 9179 } 9180 func rewriteValueARM64_OpARM64SRL_0(v *Value) bool { 9181 // match: (SRL x (MOVDconst [c])) 9182 // cond: 9183 // result: (SRLconst x [c&63]) 9184 for { 9185 _ = v.Args[1] 9186 x := v.Args[0] 9187 v_1 := v.Args[1] 9188 if v_1.Op != OpARM64MOVDconst { 9189 break 9190 } 9191 c := v_1.AuxInt 9192 v.reset(OpARM64SRLconst) 9193 v.AuxInt = c & 63 9194 v.AddArg(x) 9195 return true 9196 } 9197 return false 9198 } 9199 func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool { 9200 // match: (SRLconst [c] (MOVDconst [d])) 9201 // cond: 9202 // result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 9203 for { 9204 c := v.AuxInt 9205 v_0 := v.Args[0] 9206 if v_0.Op != OpARM64MOVDconst { 9207 break 9208 } 9209 d := v_0.AuxInt 9210 v.reset(OpARM64MOVDconst) 9211 v.AuxInt = int64(uint64(d) >> uint64(c)) 9212 return true 9213 } 9214 // match: (SRLconst [c] (SLLconst [c] x)) 9215 // cond: 0 < c && c < 64 9216 // result: (ANDconst [1<<uint(64-c)-1] x) 9217 for { 9218 c := v.AuxInt 9219 v_0 := v.Args[0] 9220 if v_0.Op != OpARM64SLLconst { 9221 break 9222 } 9223 if v_0.AuxInt != c { 9224 break 9225 } 9226 x := v_0.Args[0] 9227 if !(0 < c && c < 64) { 9228 break 9229 } 9230 v.reset(OpARM64ANDconst) 9231 v.AuxInt = 1<<uint(64-c) - 1 9232 v.AddArg(x) 9233 return true 9234 } 9235 return false 9236 } 9237 func rewriteValueARM64_OpARM64STP_0(v *Value) bool { 9238 b := v.Block 9239 _ = b 9240 config := b.Func.Config 9241 _ = config 9242 // 
match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem) 9243 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9244 // result: (STP [off1+off2] {sym} ptr val1 val2 mem) 9245 for { 9246 off1 := v.AuxInt 9247 sym := v.Aux 9248 _ = v.Args[3] 9249 v_0 := v.Args[0] 9250 if v_0.Op != OpARM64ADDconst { 9251 break 9252 } 9253 off2 := v_0.AuxInt 9254 ptr := v_0.Args[0] 9255 val1 := v.Args[1] 9256 val2 := v.Args[2] 9257 mem := v.Args[3] 9258 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9259 break 9260 } 9261 v.reset(OpARM64STP) 9262 v.AuxInt = off1 + off2 9263 v.Aux = sym 9264 v.AddArg(ptr) 9265 v.AddArg(val1) 9266 v.AddArg(val2) 9267 v.AddArg(mem) 9268 return true 9269 } 9270 // match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem) 9271 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9272 // result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem) 9273 for { 9274 off1 := v.AuxInt 9275 sym1 := v.Aux 9276 _ = v.Args[3] 9277 v_0 := v.Args[0] 9278 if v_0.Op != OpARM64MOVDaddr { 9279 break 9280 } 9281 off2 := v_0.AuxInt 9282 sym2 := v_0.Aux 9283 ptr := v_0.Args[0] 9284 val1 := v.Args[1] 9285 val2 := v.Args[2] 9286 mem := v.Args[3] 9287 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9288 break 9289 } 9290 v.reset(OpARM64STP) 9291 v.AuxInt = off1 + off2 9292 v.Aux = mergeSym(sym1, sym2) 9293 v.AddArg(ptr) 9294 v.AddArg(val1) 9295 v.AddArg(val2) 9296 v.AddArg(mem) 9297 return true 9298 } 9299 // match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem) 9300 // cond: 9301 // result: (MOVQstorezero [off] {sym} ptr mem) 9302 for { 9303 off := v.AuxInt 9304 sym := v.Aux 9305 _ = v.Args[3] 9306 ptr := v.Args[0] 9307 v_1 := v.Args[1] 9308 if v_1.Op != OpARM64MOVDconst { 9309 break 9310 } 9311 if v_1.AuxInt != 0 { 9312 break 9313 } 9314 v_2 := v.Args[2] 9315 if v_2.Op != OpARM64MOVDconst { 
9316 break 9317 } 9318 if v_2.AuxInt != 0 { 9319 break 9320 } 9321 mem := v.Args[3] 9322 v.reset(OpARM64MOVQstorezero) 9323 v.AuxInt = off 9324 v.Aux = sym 9325 v.AddArg(ptr) 9326 v.AddArg(mem) 9327 return true 9328 } 9329 return false 9330 } 9331 func rewriteValueARM64_OpARM64SUB_0(v *Value) bool { 9332 b := v.Block 9333 _ = b 9334 // match: (SUB x (MOVDconst [c])) 9335 // cond: 9336 // result: (SUBconst [c] x) 9337 for { 9338 _ = v.Args[1] 9339 x := v.Args[0] 9340 v_1 := v.Args[1] 9341 if v_1.Op != OpARM64MOVDconst { 9342 break 9343 } 9344 c := v_1.AuxInt 9345 v.reset(OpARM64SUBconst) 9346 v.AuxInt = c 9347 v.AddArg(x) 9348 return true 9349 } 9350 // match: (SUB x x) 9351 // cond: 9352 // result: (MOVDconst [0]) 9353 for { 9354 _ = v.Args[1] 9355 x := v.Args[0] 9356 if x != v.Args[1] { 9357 break 9358 } 9359 v.reset(OpARM64MOVDconst) 9360 v.AuxInt = 0 9361 return true 9362 } 9363 // match: (SUB x (SUB y z)) 9364 // cond: 9365 // result: (SUB (ADD <v.Type> x z) y) 9366 for { 9367 _ = v.Args[1] 9368 x := v.Args[0] 9369 v_1 := v.Args[1] 9370 if v_1.Op != OpARM64SUB { 9371 break 9372 } 9373 _ = v_1.Args[1] 9374 y := v_1.Args[0] 9375 z := v_1.Args[1] 9376 v.reset(OpARM64SUB) 9377 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type) 9378 v0.AddArg(x) 9379 v0.AddArg(z) 9380 v.AddArg(v0) 9381 v.AddArg(y) 9382 return true 9383 } 9384 // match: (SUB (SUB x y) z) 9385 // cond: 9386 // result: (SUB x (ADD <y.Type> y z)) 9387 for { 9388 _ = v.Args[1] 9389 v_0 := v.Args[0] 9390 if v_0.Op != OpARM64SUB { 9391 break 9392 } 9393 _ = v_0.Args[1] 9394 x := v_0.Args[0] 9395 y := v_0.Args[1] 9396 z := v.Args[1] 9397 v.reset(OpARM64SUB) 9398 v.AddArg(x) 9399 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type) 9400 v0.AddArg(y) 9401 v0.AddArg(z) 9402 v.AddArg(v0) 9403 return true 9404 } 9405 // match: (SUB x (SLLconst [c] y)) 9406 // cond: 9407 // result: (SUBshiftLL x y [c]) 9408 for { 9409 _ = v.Args[1] 9410 x := v.Args[0] 9411 v_1 := v.Args[1] 9412 if v_1.Op != OpARM64SLLconst { 9413 break 9414 } 
9415 c := v_1.AuxInt 9416 y := v_1.Args[0] 9417 v.reset(OpARM64SUBshiftLL) 9418 v.AuxInt = c 9419 v.AddArg(x) 9420 v.AddArg(y) 9421 return true 9422 } 9423 // match: (SUB x (SRLconst [c] y)) 9424 // cond: 9425 // result: (SUBshiftRL x y [c]) 9426 for { 9427 _ = v.Args[1] 9428 x := v.Args[0] 9429 v_1 := v.Args[1] 9430 if v_1.Op != OpARM64SRLconst { 9431 break 9432 } 9433 c := v_1.AuxInt 9434 y := v_1.Args[0] 9435 v.reset(OpARM64SUBshiftRL) 9436 v.AuxInt = c 9437 v.AddArg(x) 9438 v.AddArg(y) 9439 return true 9440 } 9441 // match: (SUB x (SRAconst [c] y)) 9442 // cond: 9443 // result: (SUBshiftRA x y [c]) 9444 for { 9445 _ = v.Args[1] 9446 x := v.Args[0] 9447 v_1 := v.Args[1] 9448 if v_1.Op != OpARM64SRAconst { 9449 break 9450 } 9451 c := v_1.AuxInt 9452 y := v_1.Args[0] 9453 v.reset(OpARM64SUBshiftRA) 9454 v.AuxInt = c 9455 v.AddArg(x) 9456 v.AddArg(y) 9457 return true 9458 } 9459 return false 9460 } 9461 func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool { 9462 // match: (SUBconst [0] x) 9463 // cond: 9464 // result: x 9465 for { 9466 if v.AuxInt != 0 { 9467 break 9468 } 9469 x := v.Args[0] 9470 v.reset(OpCopy) 9471 v.Type = x.Type 9472 v.AddArg(x) 9473 return true 9474 } 9475 // match: (SUBconst [c] (MOVDconst [d])) 9476 // cond: 9477 // result: (MOVDconst [d-c]) 9478 for { 9479 c := v.AuxInt 9480 v_0 := v.Args[0] 9481 if v_0.Op != OpARM64MOVDconst { 9482 break 9483 } 9484 d := v_0.AuxInt 9485 v.reset(OpARM64MOVDconst) 9486 v.AuxInt = d - c 9487 return true 9488 } 9489 // match: (SUBconst [c] (SUBconst [d] x)) 9490 // cond: 9491 // result: (ADDconst [-c-d] x) 9492 for { 9493 c := v.AuxInt 9494 v_0 := v.Args[0] 9495 if v_0.Op != OpARM64SUBconst { 9496 break 9497 } 9498 d := v_0.AuxInt 9499 x := v_0.Args[0] 9500 v.reset(OpARM64ADDconst) 9501 v.AuxInt = -c - d 9502 v.AddArg(x) 9503 return true 9504 } 9505 // match: (SUBconst [c] (ADDconst [d] x)) 9506 // cond: 9507 // result: (ADDconst [-c+d] x) 9508 for { 9509 c := v.AuxInt 9510 v_0 := v.Args[0] 9511 if v_0.Op 
!= OpARM64ADDconst { 9512 break 9513 } 9514 d := v_0.AuxInt 9515 x := v_0.Args[0] 9516 v.reset(OpARM64ADDconst) 9517 v.AuxInt = -c + d 9518 v.AddArg(x) 9519 return true 9520 } 9521 return false 9522 } 9523 func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool { 9524 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 9525 // cond: 9526 // result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 9527 for { 9528 d := v.AuxInt 9529 _ = v.Args[1] 9530 x := v.Args[0] 9531 v_1 := v.Args[1] 9532 if v_1.Op != OpARM64MOVDconst { 9533 break 9534 } 9535 c := v_1.AuxInt 9536 v.reset(OpARM64SUBconst) 9537 v.AuxInt = int64(uint64(c) << uint64(d)) 9538 v.AddArg(x) 9539 return true 9540 } 9541 // match: (SUBshiftLL x (SLLconst x [c]) [d]) 9542 // cond: c==d 9543 // result: (MOVDconst [0]) 9544 for { 9545 d := v.AuxInt 9546 _ = v.Args[1] 9547 x := v.Args[0] 9548 v_1 := v.Args[1] 9549 if v_1.Op != OpARM64SLLconst { 9550 break 9551 } 9552 c := v_1.AuxInt 9553 if x != v_1.Args[0] { 9554 break 9555 } 9556 if !(c == d) { 9557 break 9558 } 9559 v.reset(OpARM64MOVDconst) 9560 v.AuxInt = 0 9561 return true 9562 } 9563 return false 9564 } 9565 func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool { 9566 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 9567 // cond: 9568 // result: (SUBconst x [int64(int64(c)>>uint64(d))]) 9569 for { 9570 d := v.AuxInt 9571 _ = v.Args[1] 9572 x := v.Args[0] 9573 v_1 := v.Args[1] 9574 if v_1.Op != OpARM64MOVDconst { 9575 break 9576 } 9577 c := v_1.AuxInt 9578 v.reset(OpARM64SUBconst) 9579 v.AuxInt = int64(int64(c) >> uint64(d)) 9580 v.AddArg(x) 9581 return true 9582 } 9583 // match: (SUBshiftRA x (SRAconst x [c]) [d]) 9584 // cond: c==d 9585 // result: (MOVDconst [0]) 9586 for { 9587 d := v.AuxInt 9588 _ = v.Args[1] 9589 x := v.Args[0] 9590 v_1 := v.Args[1] 9591 if v_1.Op != OpARM64SRAconst { 9592 break 9593 } 9594 c := v_1.AuxInt 9595 if x != v_1.Args[0] { 9596 break 9597 } 9598 if !(c == d) { 9599 break 9600 } 9601 v.reset(OpARM64MOVDconst) 9602 v.AuxInt = 0 9603 
return true 9604 } 9605 return false 9606 } 9607 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool { 9608 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 9609 // cond: 9610 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 9611 for { 9612 d := v.AuxInt 9613 _ = v.Args[1] 9614 x := v.Args[0] 9615 v_1 := v.Args[1] 9616 if v_1.Op != OpARM64MOVDconst { 9617 break 9618 } 9619 c := v_1.AuxInt 9620 v.reset(OpARM64SUBconst) 9621 v.AuxInt = int64(uint64(c) >> uint64(d)) 9622 v.AddArg(x) 9623 return true 9624 } 9625 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 9626 // cond: c==d 9627 // result: (MOVDconst [0]) 9628 for { 9629 d := v.AuxInt 9630 _ = v.Args[1] 9631 x := v.Args[0] 9632 v_1 := v.Args[1] 9633 if v_1.Op != OpARM64SRLconst { 9634 break 9635 } 9636 c := v_1.AuxInt 9637 if x != v_1.Args[0] { 9638 break 9639 } 9640 if !(c == d) { 9641 break 9642 } 9643 v.reset(OpARM64MOVDconst) 9644 v.AuxInt = 0 9645 return true 9646 } 9647 return false 9648 } 9649 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 9650 // match: (UDIV x (MOVDconst [1])) 9651 // cond: 9652 // result: x 9653 for { 9654 _ = v.Args[1] 9655 x := v.Args[0] 9656 v_1 := v.Args[1] 9657 if v_1.Op != OpARM64MOVDconst { 9658 break 9659 } 9660 if v_1.AuxInt != 1 { 9661 break 9662 } 9663 v.reset(OpCopy) 9664 v.Type = x.Type 9665 v.AddArg(x) 9666 return true 9667 } 9668 // match: (UDIV x (MOVDconst [c])) 9669 // cond: isPowerOfTwo(c) 9670 // result: (SRLconst [log2(c)] x) 9671 for { 9672 _ = v.Args[1] 9673 x := v.Args[0] 9674 v_1 := v.Args[1] 9675 if v_1.Op != OpARM64MOVDconst { 9676 break 9677 } 9678 c := v_1.AuxInt 9679 if !(isPowerOfTwo(c)) { 9680 break 9681 } 9682 v.reset(OpARM64SRLconst) 9683 v.AuxInt = log2(c) 9684 v.AddArg(x) 9685 return true 9686 } 9687 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 9688 // cond: 9689 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 9690 for { 9691 _ = v.Args[1] 9692 v_0 := v.Args[0] 9693 if v_0.Op != OpARM64MOVDconst { 9694 break 9695 } 9696 c := 
v_0.AuxInt 9697 v_1 := v.Args[1] 9698 if v_1.Op != OpARM64MOVDconst { 9699 break 9700 } 9701 d := v_1.AuxInt 9702 v.reset(OpARM64MOVDconst) 9703 v.AuxInt = int64(uint64(c) / uint64(d)) 9704 return true 9705 } 9706 return false 9707 } 9708 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 9709 // match: (UDIVW x (MOVDconst [c])) 9710 // cond: uint32(c)==1 9711 // result: x 9712 for { 9713 _ = v.Args[1] 9714 x := v.Args[0] 9715 v_1 := v.Args[1] 9716 if v_1.Op != OpARM64MOVDconst { 9717 break 9718 } 9719 c := v_1.AuxInt 9720 if !(uint32(c) == 1) { 9721 break 9722 } 9723 v.reset(OpCopy) 9724 v.Type = x.Type 9725 v.AddArg(x) 9726 return true 9727 } 9728 // match: (UDIVW x (MOVDconst [c])) 9729 // cond: isPowerOfTwo(c) && is32Bit(c) 9730 // result: (SRLconst [log2(c)] x) 9731 for { 9732 _ = v.Args[1] 9733 x := v.Args[0] 9734 v_1 := v.Args[1] 9735 if v_1.Op != OpARM64MOVDconst { 9736 break 9737 } 9738 c := v_1.AuxInt 9739 if !(isPowerOfTwo(c) && is32Bit(c)) { 9740 break 9741 } 9742 v.reset(OpARM64SRLconst) 9743 v.AuxInt = log2(c) 9744 v.AddArg(x) 9745 return true 9746 } 9747 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 9748 // cond: 9749 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 9750 for { 9751 _ = v.Args[1] 9752 v_0 := v.Args[0] 9753 if v_0.Op != OpARM64MOVDconst { 9754 break 9755 } 9756 c := v_0.AuxInt 9757 v_1 := v.Args[1] 9758 if v_1.Op != OpARM64MOVDconst { 9759 break 9760 } 9761 d := v_1.AuxInt 9762 v.reset(OpARM64MOVDconst) 9763 v.AuxInt = int64(uint32(c) / uint32(d)) 9764 return true 9765 } 9766 return false 9767 } 9768 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 9769 // match: (UMOD _ (MOVDconst [1])) 9770 // cond: 9771 // result: (MOVDconst [0]) 9772 for { 9773 _ = v.Args[1] 9774 v_1 := v.Args[1] 9775 if v_1.Op != OpARM64MOVDconst { 9776 break 9777 } 9778 if v_1.AuxInt != 1 { 9779 break 9780 } 9781 v.reset(OpARM64MOVDconst) 9782 v.AuxInt = 0 9783 return true 9784 } 9785 // match: (UMOD x (MOVDconst [c])) 9786 // cond: 
isPowerOfTwo(c) 9787 // result: (ANDconst [c-1] x) 9788 for { 9789 _ = v.Args[1] 9790 x := v.Args[0] 9791 v_1 := v.Args[1] 9792 if v_1.Op != OpARM64MOVDconst { 9793 break 9794 } 9795 c := v_1.AuxInt 9796 if !(isPowerOfTwo(c)) { 9797 break 9798 } 9799 v.reset(OpARM64ANDconst) 9800 v.AuxInt = c - 1 9801 v.AddArg(x) 9802 return true 9803 } 9804 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 9805 // cond: 9806 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 9807 for { 9808 _ = v.Args[1] 9809 v_0 := v.Args[0] 9810 if v_0.Op != OpARM64MOVDconst { 9811 break 9812 } 9813 c := v_0.AuxInt 9814 v_1 := v.Args[1] 9815 if v_1.Op != OpARM64MOVDconst { 9816 break 9817 } 9818 d := v_1.AuxInt 9819 v.reset(OpARM64MOVDconst) 9820 v.AuxInt = int64(uint64(c) % uint64(d)) 9821 return true 9822 } 9823 return false 9824 } 9825 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 9826 // match: (UMODW _ (MOVDconst [c])) 9827 // cond: uint32(c)==1 9828 // result: (MOVDconst [0]) 9829 for { 9830 _ = v.Args[1] 9831 v_1 := v.Args[1] 9832 if v_1.Op != OpARM64MOVDconst { 9833 break 9834 } 9835 c := v_1.AuxInt 9836 if !(uint32(c) == 1) { 9837 break 9838 } 9839 v.reset(OpARM64MOVDconst) 9840 v.AuxInt = 0 9841 return true 9842 } 9843 // match: (UMODW x (MOVDconst [c])) 9844 // cond: isPowerOfTwo(c) && is32Bit(c) 9845 // result: (ANDconst [c-1] x) 9846 for { 9847 _ = v.Args[1] 9848 x := v.Args[0] 9849 v_1 := v.Args[1] 9850 if v_1.Op != OpARM64MOVDconst { 9851 break 9852 } 9853 c := v_1.AuxInt 9854 if !(isPowerOfTwo(c) && is32Bit(c)) { 9855 break 9856 } 9857 v.reset(OpARM64ANDconst) 9858 v.AuxInt = c - 1 9859 v.AddArg(x) 9860 return true 9861 } 9862 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 9863 // cond: 9864 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 9865 for { 9866 _ = v.Args[1] 9867 v_0 := v.Args[0] 9868 if v_0.Op != OpARM64MOVDconst { 9869 break 9870 } 9871 c := v_0.AuxInt 9872 v_1 := v.Args[1] 9873 if v_1.Op != OpARM64MOVDconst { 9874 break 9875 } 9876 d := v_1.AuxInt 
9877 v.reset(OpARM64MOVDconst) 9878 v.AuxInt = int64(uint32(c) % uint32(d)) 9879 return true 9880 } 9881 return false 9882 } 9883 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 9884 // match: (XOR x (MOVDconst [c])) 9885 // cond: 9886 // result: (XORconst [c] x) 9887 for { 9888 _ = v.Args[1] 9889 x := v.Args[0] 9890 v_1 := v.Args[1] 9891 if v_1.Op != OpARM64MOVDconst { 9892 break 9893 } 9894 c := v_1.AuxInt 9895 v.reset(OpARM64XORconst) 9896 v.AuxInt = c 9897 v.AddArg(x) 9898 return true 9899 } 9900 // match: (XOR (MOVDconst [c]) x) 9901 // cond: 9902 // result: (XORconst [c] x) 9903 for { 9904 _ = v.Args[1] 9905 v_0 := v.Args[0] 9906 if v_0.Op != OpARM64MOVDconst { 9907 break 9908 } 9909 c := v_0.AuxInt 9910 x := v.Args[1] 9911 v.reset(OpARM64XORconst) 9912 v.AuxInt = c 9913 v.AddArg(x) 9914 return true 9915 } 9916 // match: (XOR x x) 9917 // cond: 9918 // result: (MOVDconst [0]) 9919 for { 9920 _ = v.Args[1] 9921 x := v.Args[0] 9922 if x != v.Args[1] { 9923 break 9924 } 9925 v.reset(OpARM64MOVDconst) 9926 v.AuxInt = 0 9927 return true 9928 } 9929 // match: (XOR x (SLLconst [c] y)) 9930 // cond: 9931 // result: (XORshiftLL x y [c]) 9932 for { 9933 _ = v.Args[1] 9934 x := v.Args[0] 9935 v_1 := v.Args[1] 9936 if v_1.Op != OpARM64SLLconst { 9937 break 9938 } 9939 c := v_1.AuxInt 9940 y := v_1.Args[0] 9941 v.reset(OpARM64XORshiftLL) 9942 v.AuxInt = c 9943 v.AddArg(x) 9944 v.AddArg(y) 9945 return true 9946 } 9947 // match: (XOR (SLLconst [c] y) x) 9948 // cond: 9949 // result: (XORshiftLL x y [c]) 9950 for { 9951 _ = v.Args[1] 9952 v_0 := v.Args[0] 9953 if v_0.Op != OpARM64SLLconst { 9954 break 9955 } 9956 c := v_0.AuxInt 9957 y := v_0.Args[0] 9958 x := v.Args[1] 9959 v.reset(OpARM64XORshiftLL) 9960 v.AuxInt = c 9961 v.AddArg(x) 9962 v.AddArg(y) 9963 return true 9964 } 9965 // match: (XOR x (SRLconst [c] y)) 9966 // cond: 9967 // result: (XORshiftRL x y [c]) 9968 for { 9969 _ = v.Args[1] 9970 x := v.Args[0] 9971 v_1 := v.Args[1] 9972 if v_1.Op != OpARM64SRLconst 
{ 9973 break 9974 } 9975 c := v_1.AuxInt 9976 y := v_1.Args[0] 9977 v.reset(OpARM64XORshiftRL) 9978 v.AuxInt = c 9979 v.AddArg(x) 9980 v.AddArg(y) 9981 return true 9982 } 9983 // match: (XOR (SRLconst [c] y) x) 9984 // cond: 9985 // result: (XORshiftRL x y [c]) 9986 for { 9987 _ = v.Args[1] 9988 v_0 := v.Args[0] 9989 if v_0.Op != OpARM64SRLconst { 9990 break 9991 } 9992 c := v_0.AuxInt 9993 y := v_0.Args[0] 9994 x := v.Args[1] 9995 v.reset(OpARM64XORshiftRL) 9996 v.AuxInt = c 9997 v.AddArg(x) 9998 v.AddArg(y) 9999 return true 10000 } 10001 // match: (XOR x (SRAconst [c] y)) 10002 // cond: 10003 // result: (XORshiftRA x y [c]) 10004 for { 10005 _ = v.Args[1] 10006 x := v.Args[0] 10007 v_1 := v.Args[1] 10008 if v_1.Op != OpARM64SRAconst { 10009 break 10010 } 10011 c := v_1.AuxInt 10012 y := v_1.Args[0] 10013 v.reset(OpARM64XORshiftRA) 10014 v.AuxInt = c 10015 v.AddArg(x) 10016 v.AddArg(y) 10017 return true 10018 } 10019 // match: (XOR (SRAconst [c] y) x) 10020 // cond: 10021 // result: (XORshiftRA x y [c]) 10022 for { 10023 _ = v.Args[1] 10024 v_0 := v.Args[0] 10025 if v_0.Op != OpARM64SRAconst { 10026 break 10027 } 10028 c := v_0.AuxInt 10029 y := v_0.Args[0] 10030 x := v.Args[1] 10031 v.reset(OpARM64XORshiftRA) 10032 v.AuxInt = c 10033 v.AddArg(x) 10034 v.AddArg(y) 10035 return true 10036 } 10037 return false 10038 } 10039 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 10040 // match: (XORconst [0] x) 10041 // cond: 10042 // result: x 10043 for { 10044 if v.AuxInt != 0 { 10045 break 10046 } 10047 x := v.Args[0] 10048 v.reset(OpCopy) 10049 v.Type = x.Type 10050 v.AddArg(x) 10051 return true 10052 } 10053 // match: (XORconst [-1] x) 10054 // cond: 10055 // result: (MVN x) 10056 for { 10057 if v.AuxInt != -1 { 10058 break 10059 } 10060 x := v.Args[0] 10061 v.reset(OpARM64MVN) 10062 v.AddArg(x) 10063 return true 10064 } 10065 // match: (XORconst [c] (MOVDconst [d])) 10066 // cond: 10067 // result: (MOVDconst [c^d]) 10068 for { 10069 c := v.AuxInt 10070 v_0 
:= v.Args[0] 10071 if v_0.Op != OpARM64MOVDconst { 10072 break 10073 } 10074 d := v_0.AuxInt 10075 v.reset(OpARM64MOVDconst) 10076 v.AuxInt = c ^ d 10077 return true 10078 } 10079 // match: (XORconst [c] (XORconst [d] x)) 10080 // cond: 10081 // result: (XORconst [c^d] x) 10082 for { 10083 c := v.AuxInt 10084 v_0 := v.Args[0] 10085 if v_0.Op != OpARM64XORconst { 10086 break 10087 } 10088 d := v_0.AuxInt 10089 x := v_0.Args[0] 10090 v.reset(OpARM64XORconst) 10091 v.AuxInt = c ^ d 10092 v.AddArg(x) 10093 return true 10094 } 10095 return false 10096 } 10097 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool { 10098 b := v.Block 10099 _ = b 10100 // match: (XORshiftLL (MOVDconst [c]) x [d]) 10101 // cond: 10102 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 10103 for { 10104 d := v.AuxInt 10105 _ = v.Args[1] 10106 v_0 := v.Args[0] 10107 if v_0.Op != OpARM64MOVDconst { 10108 break 10109 } 10110 c := v_0.AuxInt 10111 x := v.Args[1] 10112 v.reset(OpARM64XORconst) 10113 v.AuxInt = c 10114 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 10115 v0.AuxInt = d 10116 v0.AddArg(x) 10117 v.AddArg(v0) 10118 return true 10119 } 10120 // match: (XORshiftLL x (MOVDconst [c]) [d]) 10121 // cond: 10122 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 10123 for { 10124 d := v.AuxInt 10125 _ = v.Args[1] 10126 x := v.Args[0] 10127 v_1 := v.Args[1] 10128 if v_1.Op != OpARM64MOVDconst { 10129 break 10130 } 10131 c := v_1.AuxInt 10132 v.reset(OpARM64XORconst) 10133 v.AuxInt = int64(uint64(c) << uint64(d)) 10134 v.AddArg(x) 10135 return true 10136 } 10137 // match: (XORshiftLL x (SLLconst x [c]) [d]) 10138 // cond: c==d 10139 // result: (MOVDconst [0]) 10140 for { 10141 d := v.AuxInt 10142 _ = v.Args[1] 10143 x := v.Args[0] 10144 v_1 := v.Args[1] 10145 if v_1.Op != OpARM64SLLconst { 10146 break 10147 } 10148 c := v_1.AuxInt 10149 if x != v_1.Args[0] { 10150 break 10151 } 10152 if !(c == d) { 10153 break 10154 } 10155 v.reset(OpARM64MOVDconst) 10156 v.AuxInt = 0 10157 return 
true 10158 } 10159 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 10160 // cond: 10161 // result: (RORconst [64-c] x) 10162 for { 10163 c := v.AuxInt 10164 _ = v.Args[1] 10165 v_0 := v.Args[0] 10166 if v_0.Op != OpARM64SRLconst { 10167 break 10168 } 10169 if v_0.AuxInt != 64-c { 10170 break 10171 } 10172 x := v_0.Args[0] 10173 if x != v.Args[1] { 10174 break 10175 } 10176 v.reset(OpARM64RORconst) 10177 v.AuxInt = 64 - c 10178 v.AddArg(x) 10179 return true 10180 } 10181 // match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x) 10182 // cond: c < 32 && t.Size() == 4 10183 // result: (RORWconst [32-c] x) 10184 for { 10185 t := v.Type 10186 c := v.AuxInt 10187 _ = v.Args[1] 10188 v_0 := v.Args[0] 10189 if v_0.Op != OpARM64SRLconst { 10190 break 10191 } 10192 if v_0.AuxInt != 32-c { 10193 break 10194 } 10195 v_0_0 := v_0.Args[0] 10196 if v_0_0.Op != OpARM64MOVWUreg { 10197 break 10198 } 10199 x := v_0_0.Args[0] 10200 if x != v.Args[1] { 10201 break 10202 } 10203 if !(c < 32 && t.Size() == 4) { 10204 break 10205 } 10206 v.reset(OpARM64RORWconst) 10207 v.AuxInt = 32 - c 10208 v.AddArg(x) 10209 return true 10210 } 10211 return false 10212 } 10213 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 10214 b := v.Block 10215 _ = b 10216 // match: (XORshiftRA (MOVDconst [c]) x [d]) 10217 // cond: 10218 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 10219 for { 10220 d := v.AuxInt 10221 _ = v.Args[1] 10222 v_0 := v.Args[0] 10223 if v_0.Op != OpARM64MOVDconst { 10224 break 10225 } 10226 c := v_0.AuxInt 10227 x := v.Args[1] 10228 v.reset(OpARM64XORconst) 10229 v.AuxInt = c 10230 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 10231 v0.AuxInt = d 10232 v0.AddArg(x) 10233 v.AddArg(v0) 10234 return true 10235 } 10236 // match: (XORshiftRA x (MOVDconst [c]) [d]) 10237 // cond: 10238 // result: (XORconst x [int64(int64(c)>>uint64(d))]) 10239 for { 10240 d := v.AuxInt 10241 _ = v.Args[1] 10242 x := v.Args[0] 10243 v_1 := v.Args[1] 10244 if v_1.Op != 
OpARM64MOVDconst { 10245 break 10246 } 10247 c := v_1.AuxInt 10248 v.reset(OpARM64XORconst) 10249 v.AuxInt = int64(int64(c) >> uint64(d)) 10250 v.AddArg(x) 10251 return true 10252 } 10253 // match: (XORshiftRA x (SRAconst x [c]) [d]) 10254 // cond: c==d 10255 // result: (MOVDconst [0]) 10256 for { 10257 d := v.AuxInt 10258 _ = v.Args[1] 10259 x := v.Args[0] 10260 v_1 := v.Args[1] 10261 if v_1.Op != OpARM64SRAconst { 10262 break 10263 } 10264 c := v_1.AuxInt 10265 if x != v_1.Args[0] { 10266 break 10267 } 10268 if !(c == d) { 10269 break 10270 } 10271 v.reset(OpARM64MOVDconst) 10272 v.AuxInt = 0 10273 return true 10274 } 10275 return false 10276 } 10277 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 10278 b := v.Block 10279 _ = b 10280 // match: (XORshiftRL (MOVDconst [c]) x [d]) 10281 // cond: 10282 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 10283 for { 10284 d := v.AuxInt 10285 _ = v.Args[1] 10286 v_0 := v.Args[0] 10287 if v_0.Op != OpARM64MOVDconst { 10288 break 10289 } 10290 c := v_0.AuxInt 10291 x := v.Args[1] 10292 v.reset(OpARM64XORconst) 10293 v.AuxInt = c 10294 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 10295 v0.AuxInt = d 10296 v0.AddArg(x) 10297 v.AddArg(v0) 10298 return true 10299 } 10300 // match: (XORshiftRL x (MOVDconst [c]) [d]) 10301 // cond: 10302 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 10303 for { 10304 d := v.AuxInt 10305 _ = v.Args[1] 10306 x := v.Args[0] 10307 v_1 := v.Args[1] 10308 if v_1.Op != OpARM64MOVDconst { 10309 break 10310 } 10311 c := v_1.AuxInt 10312 v.reset(OpARM64XORconst) 10313 v.AuxInt = int64(uint64(c) >> uint64(d)) 10314 v.AddArg(x) 10315 return true 10316 } 10317 // match: (XORshiftRL x (SRLconst x [c]) [d]) 10318 // cond: c==d 10319 // result: (MOVDconst [0]) 10320 for { 10321 d := v.AuxInt 10322 _ = v.Args[1] 10323 x := v.Args[0] 10324 v_1 := v.Args[1] 10325 if v_1.Op != OpARM64SRLconst { 10326 break 10327 } 10328 c := v_1.AuxInt 10329 if x != v_1.Args[0] { 10330 break 10331 } 10332 
if !(c == d) { 10333 break 10334 } 10335 v.reset(OpARM64MOVDconst) 10336 v.AuxInt = 0 10337 return true 10338 } 10339 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 10340 // cond: 10341 // result: (RORconst [ c] x) 10342 for { 10343 c := v.AuxInt 10344 _ = v.Args[1] 10345 v_0 := v.Args[0] 10346 if v_0.Op != OpARM64SLLconst { 10347 break 10348 } 10349 if v_0.AuxInt != 64-c { 10350 break 10351 } 10352 x := v_0.Args[0] 10353 if x != v.Args[1] { 10354 break 10355 } 10356 v.reset(OpARM64RORconst) 10357 v.AuxInt = c 10358 v.AddArg(x) 10359 return true 10360 } 10361 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 10362 // cond: c < 32 && t.Size() == 4 10363 // result: (RORWconst [ c] x) 10364 for { 10365 t := v.Type 10366 c := v.AuxInt 10367 _ = v.Args[1] 10368 v_0 := v.Args[0] 10369 if v_0.Op != OpARM64SLLconst { 10370 break 10371 } 10372 if v_0.AuxInt != 32-c { 10373 break 10374 } 10375 x := v_0.Args[0] 10376 v_1 := v.Args[1] 10377 if v_1.Op != OpARM64MOVWUreg { 10378 break 10379 } 10380 if x != v_1.Args[0] { 10381 break 10382 } 10383 if !(c < 32 && t.Size() == 4) { 10384 break 10385 } 10386 v.reset(OpARM64RORWconst) 10387 v.AuxInt = c 10388 v.AddArg(x) 10389 return true 10390 } 10391 return false 10392 } 10393 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 10394 // match: (Add16 x y) 10395 // cond: 10396 // result: (ADD x y) 10397 for { 10398 _ = v.Args[1] 10399 x := v.Args[0] 10400 y := v.Args[1] 10401 v.reset(OpARM64ADD) 10402 v.AddArg(x) 10403 v.AddArg(y) 10404 return true 10405 } 10406 } 10407 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 10408 // match: (Add32 x y) 10409 // cond: 10410 // result: (ADD x y) 10411 for { 10412 _ = v.Args[1] 10413 x := v.Args[0] 10414 y := v.Args[1] 10415 v.reset(OpARM64ADD) 10416 v.AddArg(x) 10417 v.AddArg(y) 10418 return true 10419 } 10420 } 10421 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 10422 // match: (Add32F x y) 10423 // cond: 10424 // result: (FADDS x y) 10425 for { 10426 _ = v.Args[1] 10427 x 
:= v.Args[0] 10428 y := v.Args[1] 10429 v.reset(OpARM64FADDS) 10430 v.AddArg(x) 10431 v.AddArg(y) 10432 return true 10433 } 10434 } 10435 func rewriteValueARM64_OpAdd64_0(v *Value) bool { 10436 // match: (Add64 x y) 10437 // cond: 10438 // result: (ADD x y) 10439 for { 10440 _ = v.Args[1] 10441 x := v.Args[0] 10442 y := v.Args[1] 10443 v.reset(OpARM64ADD) 10444 v.AddArg(x) 10445 v.AddArg(y) 10446 return true 10447 } 10448 } 10449 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 10450 // match: (Add64F x y) 10451 // cond: 10452 // result: (FADDD x y) 10453 for { 10454 _ = v.Args[1] 10455 x := v.Args[0] 10456 y := v.Args[1] 10457 v.reset(OpARM64FADDD) 10458 v.AddArg(x) 10459 v.AddArg(y) 10460 return true 10461 } 10462 } 10463 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 10464 // match: (Add8 x y) 10465 // cond: 10466 // result: (ADD x y) 10467 for { 10468 _ = v.Args[1] 10469 x := v.Args[0] 10470 y := v.Args[1] 10471 v.reset(OpARM64ADD) 10472 v.AddArg(x) 10473 v.AddArg(y) 10474 return true 10475 } 10476 } 10477 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 10478 // match: (AddPtr x y) 10479 // cond: 10480 // result: (ADD x y) 10481 for { 10482 _ = v.Args[1] 10483 x := v.Args[0] 10484 y := v.Args[1] 10485 v.reset(OpARM64ADD) 10486 v.AddArg(x) 10487 v.AddArg(y) 10488 return true 10489 } 10490 } 10491 func rewriteValueARM64_OpAddr_0(v *Value) bool { 10492 // match: (Addr {sym} base) 10493 // cond: 10494 // result: (MOVDaddr {sym} base) 10495 for { 10496 sym := v.Aux 10497 base := v.Args[0] 10498 v.reset(OpARM64MOVDaddr) 10499 v.Aux = sym 10500 v.AddArg(base) 10501 return true 10502 } 10503 } 10504 func rewriteValueARM64_OpAnd16_0(v *Value) bool { 10505 // match: (And16 x y) 10506 // cond: 10507 // result: (AND x y) 10508 for { 10509 _ = v.Args[1] 10510 x := v.Args[0] 10511 y := v.Args[1] 10512 v.reset(OpARM64AND) 10513 v.AddArg(x) 10514 v.AddArg(y) 10515 return true 10516 } 10517 } 10518 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 10519 // match: (And32 
x y) 10520 // cond: 10521 // result: (AND x y) 10522 for { 10523 _ = v.Args[1] 10524 x := v.Args[0] 10525 y := v.Args[1] 10526 v.reset(OpARM64AND) 10527 v.AddArg(x) 10528 v.AddArg(y) 10529 return true 10530 } 10531 } 10532 func rewriteValueARM64_OpAnd64_0(v *Value) bool { 10533 // match: (And64 x y) 10534 // cond: 10535 // result: (AND x y) 10536 for { 10537 _ = v.Args[1] 10538 x := v.Args[0] 10539 y := v.Args[1] 10540 v.reset(OpARM64AND) 10541 v.AddArg(x) 10542 v.AddArg(y) 10543 return true 10544 } 10545 } 10546 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 10547 // match: (And8 x y) 10548 // cond: 10549 // result: (AND x y) 10550 for { 10551 _ = v.Args[1] 10552 x := v.Args[0] 10553 y := v.Args[1] 10554 v.reset(OpARM64AND) 10555 v.AddArg(x) 10556 v.AddArg(y) 10557 return true 10558 } 10559 } 10560 func rewriteValueARM64_OpAndB_0(v *Value) bool { 10561 // match: (AndB x y) 10562 // cond: 10563 // result: (AND x y) 10564 for { 10565 _ = v.Args[1] 10566 x := v.Args[0] 10567 y := v.Args[1] 10568 v.reset(OpARM64AND) 10569 v.AddArg(x) 10570 v.AddArg(y) 10571 return true 10572 } 10573 } 10574 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 10575 // match: (AtomicAdd32 ptr val mem) 10576 // cond: 10577 // result: (LoweredAtomicAdd32 ptr val mem) 10578 for { 10579 _ = v.Args[2] 10580 ptr := v.Args[0] 10581 val := v.Args[1] 10582 mem := v.Args[2] 10583 v.reset(OpARM64LoweredAtomicAdd32) 10584 v.AddArg(ptr) 10585 v.AddArg(val) 10586 v.AddArg(mem) 10587 return true 10588 } 10589 } 10590 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 10591 // match: (AtomicAdd64 ptr val mem) 10592 // cond: 10593 // result: (LoweredAtomicAdd64 ptr val mem) 10594 for { 10595 _ = v.Args[2] 10596 ptr := v.Args[0] 10597 val := v.Args[1] 10598 mem := v.Args[2] 10599 v.reset(OpARM64LoweredAtomicAdd64) 10600 v.AddArg(ptr) 10601 v.AddArg(val) 10602 v.AddArg(mem) 10603 return true 10604 } 10605 } 10606 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 10607 // match: (AtomicAnd8 
ptr val mem) 10608 // cond: 10609 // result: (LoweredAtomicAnd8 ptr val mem) 10610 for { 10611 _ = v.Args[2] 10612 ptr := v.Args[0] 10613 val := v.Args[1] 10614 mem := v.Args[2] 10615 v.reset(OpARM64LoweredAtomicAnd8) 10616 v.AddArg(ptr) 10617 v.AddArg(val) 10618 v.AddArg(mem) 10619 return true 10620 } 10621 } 10622 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 10623 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 10624 // cond: 10625 // result: (LoweredAtomicCas32 ptr old new_ mem) 10626 for { 10627 _ = v.Args[3] 10628 ptr := v.Args[0] 10629 old := v.Args[1] 10630 new_ := v.Args[2] 10631 mem := v.Args[3] 10632 v.reset(OpARM64LoweredAtomicCas32) 10633 v.AddArg(ptr) 10634 v.AddArg(old) 10635 v.AddArg(new_) 10636 v.AddArg(mem) 10637 return true 10638 } 10639 } 10640 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 10641 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 10642 // cond: 10643 // result: (LoweredAtomicCas64 ptr old new_ mem) 10644 for { 10645 _ = v.Args[3] 10646 ptr := v.Args[0] 10647 old := v.Args[1] 10648 new_ := v.Args[2] 10649 mem := v.Args[3] 10650 v.reset(OpARM64LoweredAtomicCas64) 10651 v.AddArg(ptr) 10652 v.AddArg(old) 10653 v.AddArg(new_) 10654 v.AddArg(mem) 10655 return true 10656 } 10657 } 10658 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool { 10659 // match: (AtomicExchange32 ptr val mem) 10660 // cond: 10661 // result: (LoweredAtomicExchange32 ptr val mem) 10662 for { 10663 _ = v.Args[2] 10664 ptr := v.Args[0] 10665 val := v.Args[1] 10666 mem := v.Args[2] 10667 v.reset(OpARM64LoweredAtomicExchange32) 10668 v.AddArg(ptr) 10669 v.AddArg(val) 10670 v.AddArg(mem) 10671 return true 10672 } 10673 } 10674 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool { 10675 // match: (AtomicExchange64 ptr val mem) 10676 // cond: 10677 // result: (LoweredAtomicExchange64 ptr val mem) 10678 for { 10679 _ = v.Args[2] 10680 ptr := v.Args[0] 10681 val := v.Args[1] 10682 mem := v.Args[2] 10683 
v.reset(OpARM64LoweredAtomicExchange64) 10684 v.AddArg(ptr) 10685 v.AddArg(val) 10686 v.AddArg(mem) 10687 return true 10688 } 10689 } 10690 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool { 10691 // match: (AtomicLoad32 ptr mem) 10692 // cond: 10693 // result: (LDARW ptr mem) 10694 for { 10695 _ = v.Args[1] 10696 ptr := v.Args[0] 10697 mem := v.Args[1] 10698 v.reset(OpARM64LDARW) 10699 v.AddArg(ptr) 10700 v.AddArg(mem) 10701 return true 10702 } 10703 } 10704 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool { 10705 // match: (AtomicLoad64 ptr mem) 10706 // cond: 10707 // result: (LDAR ptr mem) 10708 for { 10709 _ = v.Args[1] 10710 ptr := v.Args[0] 10711 mem := v.Args[1] 10712 v.reset(OpARM64LDAR) 10713 v.AddArg(ptr) 10714 v.AddArg(mem) 10715 return true 10716 } 10717 } 10718 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool { 10719 // match: (AtomicLoadPtr ptr mem) 10720 // cond: 10721 // result: (LDAR ptr mem) 10722 for { 10723 _ = v.Args[1] 10724 ptr := v.Args[0] 10725 mem := v.Args[1] 10726 v.reset(OpARM64LDAR) 10727 v.AddArg(ptr) 10728 v.AddArg(mem) 10729 return true 10730 } 10731 } 10732 func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool { 10733 // match: (AtomicOr8 ptr val mem) 10734 // cond: 10735 // result: (LoweredAtomicOr8 ptr val mem) 10736 for { 10737 _ = v.Args[2] 10738 ptr := v.Args[0] 10739 val := v.Args[1] 10740 mem := v.Args[2] 10741 v.reset(OpARM64LoweredAtomicOr8) 10742 v.AddArg(ptr) 10743 v.AddArg(val) 10744 v.AddArg(mem) 10745 return true 10746 } 10747 } 10748 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool { 10749 // match: (AtomicStore32 ptr val mem) 10750 // cond: 10751 // result: (STLRW ptr val mem) 10752 for { 10753 _ = v.Args[2] 10754 ptr := v.Args[0] 10755 val := v.Args[1] 10756 mem := v.Args[2] 10757 v.reset(OpARM64STLRW) 10758 v.AddArg(ptr) 10759 v.AddArg(val) 10760 v.AddArg(mem) 10761 return true 10762 } 10763 } 10764 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool { 10765 // match: (AtomicStore64 
ptr val mem) 10766 // cond: 10767 // result: (STLR ptr val mem) 10768 for { 10769 _ = v.Args[2] 10770 ptr := v.Args[0] 10771 val := v.Args[1] 10772 mem := v.Args[2] 10773 v.reset(OpARM64STLR) 10774 v.AddArg(ptr) 10775 v.AddArg(val) 10776 v.AddArg(mem) 10777 return true 10778 } 10779 } 10780 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool { 10781 // match: (AtomicStorePtrNoWB ptr val mem) 10782 // cond: 10783 // result: (STLR ptr val mem) 10784 for { 10785 _ = v.Args[2] 10786 ptr := v.Args[0] 10787 val := v.Args[1] 10788 mem := v.Args[2] 10789 v.reset(OpARM64STLR) 10790 v.AddArg(ptr) 10791 v.AddArg(val) 10792 v.AddArg(mem) 10793 return true 10794 } 10795 } 10796 func rewriteValueARM64_OpAvg64u_0(v *Value) bool { 10797 b := v.Block 10798 _ = b 10799 // match: (Avg64u <t> x y) 10800 // cond: 10801 // result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y) 10802 for { 10803 t := v.Type 10804 _ = v.Args[1] 10805 x := v.Args[0] 10806 y := v.Args[1] 10807 v.reset(OpARM64ADD) 10808 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 10809 v0.AuxInt = 1 10810 v1 := b.NewValue0(v.Pos, OpARM64SUB, t) 10811 v1.AddArg(x) 10812 v1.AddArg(y) 10813 v0.AddArg(v1) 10814 v.AddArg(v0) 10815 v.AddArg(y) 10816 return true 10817 } 10818 } 10819 func rewriteValueARM64_OpBitLen64_0(v *Value) bool { 10820 b := v.Block 10821 _ = b 10822 typ := &b.Func.Config.Types 10823 _ = typ 10824 // match: (BitLen64 x) 10825 // cond: 10826 // result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x)) 10827 for { 10828 x := v.Args[0] 10829 v.reset(OpARM64SUB) 10830 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 10831 v0.AuxInt = 64 10832 v.AddArg(v0) 10833 v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int) 10834 v1.AddArg(x) 10835 v.AddArg(v1) 10836 return true 10837 } 10838 } 10839 func rewriteValueARM64_OpBitRev16_0(v *Value) bool { 10840 b := v.Block 10841 _ = b 10842 typ := &b.Func.Config.Types 10843 _ = typ 10844 // match: (BitRev16 x) 10845 // cond: 10846 // result: (SRLconst [48] (RBIT <typ.UInt64> x)) 
10847 for { 10848 x := v.Args[0] 10849 v.reset(OpARM64SRLconst) 10850 v.AuxInt = 48 10851 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 10852 v0.AddArg(x) 10853 v.AddArg(v0) 10854 return true 10855 } 10856 } 10857 func rewriteValueARM64_OpBitRev32_0(v *Value) bool { 10858 // match: (BitRev32 x) 10859 // cond: 10860 // result: (RBITW x) 10861 for { 10862 x := v.Args[0] 10863 v.reset(OpARM64RBITW) 10864 v.AddArg(x) 10865 return true 10866 } 10867 } 10868 func rewriteValueARM64_OpBitRev64_0(v *Value) bool { 10869 // match: (BitRev64 x) 10870 // cond: 10871 // result: (RBIT x) 10872 for { 10873 x := v.Args[0] 10874 v.reset(OpARM64RBIT) 10875 v.AddArg(x) 10876 return true 10877 } 10878 } 10879 func rewriteValueARM64_OpBitRev8_0(v *Value) bool { 10880 b := v.Block 10881 _ = b 10882 typ := &b.Func.Config.Types 10883 _ = typ 10884 // match: (BitRev8 x) 10885 // cond: 10886 // result: (SRLconst [56] (RBIT <typ.UInt64> x)) 10887 for { 10888 x := v.Args[0] 10889 v.reset(OpARM64SRLconst) 10890 v.AuxInt = 56 10891 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 10892 v0.AddArg(x) 10893 v.AddArg(v0) 10894 return true 10895 } 10896 } 10897 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 10898 // match: (Bswap32 x) 10899 // cond: 10900 // result: (REVW x) 10901 for { 10902 x := v.Args[0] 10903 v.reset(OpARM64REVW) 10904 v.AddArg(x) 10905 return true 10906 } 10907 } 10908 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 10909 // match: (Bswap64 x) 10910 // cond: 10911 // result: (REV x) 10912 for { 10913 x := v.Args[0] 10914 v.reset(OpARM64REV) 10915 v.AddArg(x) 10916 return true 10917 } 10918 } 10919 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 10920 // match: (ClosureCall [argwid] entry closure mem) 10921 // cond: 10922 // result: (CALLclosure [argwid] entry closure mem) 10923 for { 10924 argwid := v.AuxInt 10925 _ = v.Args[2] 10926 entry := v.Args[0] 10927 closure := v.Args[1] 10928 mem := v.Args[2] 10929 v.reset(OpARM64CALLclosure) 10930 v.AuxInt = 
argwid 10931 v.AddArg(entry) 10932 v.AddArg(closure) 10933 v.AddArg(mem) 10934 return true 10935 } 10936 } 10937 func rewriteValueARM64_OpCom16_0(v *Value) bool { 10938 // match: (Com16 x) 10939 // cond: 10940 // result: (MVN x) 10941 for { 10942 x := v.Args[0] 10943 v.reset(OpARM64MVN) 10944 v.AddArg(x) 10945 return true 10946 } 10947 } 10948 func rewriteValueARM64_OpCom32_0(v *Value) bool { 10949 // match: (Com32 x) 10950 // cond: 10951 // result: (MVN x) 10952 for { 10953 x := v.Args[0] 10954 v.reset(OpARM64MVN) 10955 v.AddArg(x) 10956 return true 10957 } 10958 } 10959 func rewriteValueARM64_OpCom64_0(v *Value) bool { 10960 // match: (Com64 x) 10961 // cond: 10962 // result: (MVN x) 10963 for { 10964 x := v.Args[0] 10965 v.reset(OpARM64MVN) 10966 v.AddArg(x) 10967 return true 10968 } 10969 } 10970 func rewriteValueARM64_OpCom8_0(v *Value) bool { 10971 // match: (Com8 x) 10972 // cond: 10973 // result: (MVN x) 10974 for { 10975 x := v.Args[0] 10976 v.reset(OpARM64MVN) 10977 v.AddArg(x) 10978 return true 10979 } 10980 } 10981 func rewriteValueARM64_OpConst16_0(v *Value) bool { 10982 // match: (Const16 [val]) 10983 // cond: 10984 // result: (MOVDconst [val]) 10985 for { 10986 val := v.AuxInt 10987 v.reset(OpARM64MOVDconst) 10988 v.AuxInt = val 10989 return true 10990 } 10991 } 10992 func rewriteValueARM64_OpConst32_0(v *Value) bool { 10993 // match: (Const32 [val]) 10994 // cond: 10995 // result: (MOVDconst [val]) 10996 for { 10997 val := v.AuxInt 10998 v.reset(OpARM64MOVDconst) 10999 v.AuxInt = val 11000 return true 11001 } 11002 } 11003 func rewriteValueARM64_OpConst32F_0(v *Value) bool { 11004 // match: (Const32F [val]) 11005 // cond: 11006 // result: (FMOVSconst [val]) 11007 for { 11008 val := v.AuxInt 11009 v.reset(OpARM64FMOVSconst) 11010 v.AuxInt = val 11011 return true 11012 } 11013 } 11014 func rewriteValueARM64_OpConst64_0(v *Value) bool { 11015 // match: (Const64 [val]) 11016 // cond: 11017 // result: (MOVDconst [val]) 11018 for { 11019 val := v.AuxInt 
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVDconst [b])
	for {
		b := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = b
		return true
	}
}
func rewriteValueARM64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
func rewriteValueARM64_OpConvert_0(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVDconvert x mem)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// Count-trailing-zeros is lowered as bit-reverse (RBITW) followed by
// count-leading-zeros (CLZW).
func rewriteValueARM64_OpCtz32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz32 <t> x)
	// cond:
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// 64-bit variant of the RBIT+CLZ count-trailing-zeros lowering above.
func rewriteValueARM64_OpCtz64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz64 <t> x)
	// cond:
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (FCVTZSSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool {
	// match: (Cvt32Fto32U x)
	// cond:
	// result: (FCVTZUSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (FCVTZSS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (FCVTSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool {
	// match: (Cvt32Fto64U x)
	// cond:
	// result: (FCVTZUS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool {
	// match: (Cvt32Uto32F x)
	// cond:
	// result: (UCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool {
	// match: (Cvt32Uto64F x)
	// cond:
	// result: (UCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (SCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (SCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (FCVTZSDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (FCVTDS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTDS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool {
	// match: (Cvt64Fto32U x)
	// cond:
	// result: (FCVTZUDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (FCVTZSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool {
	// match: (Cvt64Fto64U x)
	// cond:
	// result: (FCVTZUD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool {
	// match: (Cvt64Uto32F x)
	// cond:
	// result: (UCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool {
	// match: (Cvt64Uto64F x)
	// cond:
	// result: (UCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (SCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (SCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFD)
		v.AddArg(x)
		return true
	}
}
// Narrow signed division is widened to 32 bits before using DIVW.
func rewriteValueARM64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv32_0(v *Value) bool {
	// match: (Div32 x y)
	// cond:
	// result: (DIVW x y)
	for {
		_ =
		v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32u_0(v *Value) bool {
	// match: (Div32u x y)
	// cond:
	// result: (UDIVW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64_0(v *Value) bool {
	// match: (Div64 x y)
	// cond:
	// result: (DIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (FDIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64u_0(v *Value) bool {
	// match: (Div64u x y)
	// cond:
	// result: (UDIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// 8-bit division is widened to 32 bits, like Div16 above.
func rewriteValueARM64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// Narrow equality widens both operands before the 32-bit compare.
func rewriteValueARM64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// Boolean equality: x XOR y is the inequality bit (presumably bools are
// 0/1 here — per the generic lowering convention), so XOR with 1 negates it.
func rewriteValueARM64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 :=
		b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}
func rewriteValueARM64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// High 32 bits of a 32x32 multiply: do a full widening multiply (MULL)
// and arithmetic-shift the 64-bit product right by 32.
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul64_0(v *Value) bool {
	// match: (Hmul64 x y)
	// cond:
	// result: (MULH x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpHmul64u_0(v *Value) bool {
	// match: (Hmul64u x y)
	// cond:
	// result: (UMULH x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[1]
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// Bounds checks use an unsigned compare (LessThanU / LessEqualU below).
func rewriteValueARM64_OpIsInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (LessThanU (CMP idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (LessEqualU (CMP idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32 x y)
	// cond:
	// result: (LessEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// Float x <= y is rewritten as y >= x: note the swapped FCMPS operands.
func rewriteValueARM64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (LessEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64 x y)
	// cond:
	// result: (LessEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// Float x <= y is rewritten as y >= x: note the swapped FCMPD operands.
func rewriteValueARM64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64U x y)
	// cond:
	// result: (LessEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (LessThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// Float x < y is rewritten as y > x: note the swapped FCMPS operands.
func rewriteValueARM64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (GreaterThan (FCMPS y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (LessThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64 x y)
	// cond:
	// result: (LessThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// Float x < y is rewritten as y > x: note the swapped FCMPD operands.
func rewriteValueARM64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (GreaterThan (FCMPD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64U x y)
	// cond:
	// result: (LessThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return
true 12624 } 12625 // match: (Load <t> ptr mem) 12626 // cond: (is8BitInt(t) && isSigned(t)) 12627 // result: (MOVBload ptr mem) 12628 for { 12629 t := v.Type 12630 _ = v.Args[1] 12631 ptr := v.Args[0] 12632 mem := v.Args[1] 12633 if !(is8BitInt(t) && isSigned(t)) { 12634 break 12635 } 12636 v.reset(OpARM64MOVBload) 12637 v.AddArg(ptr) 12638 v.AddArg(mem) 12639 return true 12640 } 12641 // match: (Load <t> ptr mem) 12642 // cond: (is8BitInt(t) && !isSigned(t)) 12643 // result: (MOVBUload ptr mem) 12644 for { 12645 t := v.Type 12646 _ = v.Args[1] 12647 ptr := v.Args[0] 12648 mem := v.Args[1] 12649 if !(is8BitInt(t) && !isSigned(t)) { 12650 break 12651 } 12652 v.reset(OpARM64MOVBUload) 12653 v.AddArg(ptr) 12654 v.AddArg(mem) 12655 return true 12656 } 12657 // match: (Load <t> ptr mem) 12658 // cond: (is16BitInt(t) && isSigned(t)) 12659 // result: (MOVHload ptr mem) 12660 for { 12661 t := v.Type 12662 _ = v.Args[1] 12663 ptr := v.Args[0] 12664 mem := v.Args[1] 12665 if !(is16BitInt(t) && isSigned(t)) { 12666 break 12667 } 12668 v.reset(OpARM64MOVHload) 12669 v.AddArg(ptr) 12670 v.AddArg(mem) 12671 return true 12672 } 12673 // match: (Load <t> ptr mem) 12674 // cond: (is16BitInt(t) && !isSigned(t)) 12675 // result: (MOVHUload ptr mem) 12676 for { 12677 t := v.Type 12678 _ = v.Args[1] 12679 ptr := v.Args[0] 12680 mem := v.Args[1] 12681 if !(is16BitInt(t) && !isSigned(t)) { 12682 break 12683 } 12684 v.reset(OpARM64MOVHUload) 12685 v.AddArg(ptr) 12686 v.AddArg(mem) 12687 return true 12688 } 12689 // match: (Load <t> ptr mem) 12690 // cond: (is32BitInt(t) && isSigned(t)) 12691 // result: (MOVWload ptr mem) 12692 for { 12693 t := v.Type 12694 _ = v.Args[1] 12695 ptr := v.Args[0] 12696 mem := v.Args[1] 12697 if !(is32BitInt(t) && isSigned(t)) { 12698 break 12699 } 12700 v.reset(OpARM64MOVWload) 12701 v.AddArg(ptr) 12702 v.AddArg(mem) 12703 return true 12704 } 12705 // match: (Load <t> ptr mem) 12706 // cond: (is32BitInt(t) && !isSigned(t)) 12707 // result: (MOVWUload ptr 
mem) 12708 for { 12709 t := v.Type 12710 _ = v.Args[1] 12711 ptr := v.Args[0] 12712 mem := v.Args[1] 12713 if !(is32BitInt(t) && !isSigned(t)) { 12714 break 12715 } 12716 v.reset(OpARM64MOVWUload) 12717 v.AddArg(ptr) 12718 v.AddArg(mem) 12719 return true 12720 } 12721 // match: (Load <t> ptr mem) 12722 // cond: (is64BitInt(t) || isPtr(t)) 12723 // result: (MOVDload ptr mem) 12724 for { 12725 t := v.Type 12726 _ = v.Args[1] 12727 ptr := v.Args[0] 12728 mem := v.Args[1] 12729 if !(is64BitInt(t) || isPtr(t)) { 12730 break 12731 } 12732 v.reset(OpARM64MOVDload) 12733 v.AddArg(ptr) 12734 v.AddArg(mem) 12735 return true 12736 } 12737 // match: (Load <t> ptr mem) 12738 // cond: is32BitFloat(t) 12739 // result: (FMOVSload ptr mem) 12740 for { 12741 t := v.Type 12742 _ = v.Args[1] 12743 ptr := v.Args[0] 12744 mem := v.Args[1] 12745 if !(is32BitFloat(t)) { 12746 break 12747 } 12748 v.reset(OpARM64FMOVSload) 12749 v.AddArg(ptr) 12750 v.AddArg(mem) 12751 return true 12752 } 12753 // match: (Load <t> ptr mem) 12754 // cond: is64BitFloat(t) 12755 // result: (FMOVDload ptr mem) 12756 for { 12757 t := v.Type 12758 _ = v.Args[1] 12759 ptr := v.Args[0] 12760 mem := v.Args[1] 12761 if !(is64BitFloat(t)) { 12762 break 12763 } 12764 v.reset(OpARM64FMOVDload) 12765 v.AddArg(ptr) 12766 v.AddArg(mem) 12767 return true 12768 } 12769 return false 12770 } 12771 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 12772 b := v.Block 12773 _ = b 12774 typ := &b.Func.Config.Types 12775 _ = typ 12776 // match: (Lsh16x16 <t> x y) 12777 // cond: 12778 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12779 for { 12780 t := v.Type 12781 _ = v.Args[1] 12782 x := v.Args[0] 12783 y := v.Args[1] 12784 v.reset(OpARM64CSELULT) 12785 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12786 v0.AddArg(x) 12787 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12788 v1.AddArg(y) 12789 v0.AddArg(v1) 12790 v.AddArg(v0) 12791 v2 := b.NewValue0(v.Pos, 
OpARM64MOVDconst, t) 12792 v2.AuxInt = 0 12793 v.AddArg(v2) 12794 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12795 v3.AuxInt = 64 12796 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12797 v4.AddArg(y) 12798 v3.AddArg(v4) 12799 v.AddArg(v3) 12800 return true 12801 } 12802 } 12803 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 12804 b := v.Block 12805 _ = b 12806 typ := &b.Func.Config.Types 12807 _ = typ 12808 // match: (Lsh16x32 <t> x y) 12809 // cond: 12810 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12811 for { 12812 t := v.Type 12813 _ = v.Args[1] 12814 x := v.Args[0] 12815 y := v.Args[1] 12816 v.reset(OpARM64CSELULT) 12817 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12818 v0.AddArg(x) 12819 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12820 v1.AddArg(y) 12821 v0.AddArg(v1) 12822 v.AddArg(v0) 12823 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12824 v2.AuxInt = 0 12825 v.AddArg(v2) 12826 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12827 v3.AuxInt = 64 12828 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12829 v4.AddArg(y) 12830 v3.AddArg(v4) 12831 v.AddArg(v3) 12832 return true 12833 } 12834 } 12835 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 12836 b := v.Block 12837 _ = b 12838 // match: (Lsh16x64 <t> x y) 12839 // cond: 12840 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 12841 for { 12842 t := v.Type 12843 _ = v.Args[1] 12844 x := v.Args[0] 12845 y := v.Args[1] 12846 v.reset(OpARM64CSELULT) 12847 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12848 v0.AddArg(x) 12849 v0.AddArg(y) 12850 v.AddArg(v0) 12851 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12852 v1.AuxInt = 0 12853 v.AddArg(v1) 12854 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12855 v2.AuxInt = 64 12856 v2.AddArg(y) 12857 v.AddArg(v2) 12858 return true 12859 } 12860 } 12861 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 12862 b := 
v.Block 12863 _ = b 12864 typ := &b.Func.Config.Types 12865 _ = typ 12866 // match: (Lsh16x8 <t> x y) 12867 // cond: 12868 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12869 for { 12870 t := v.Type 12871 _ = v.Args[1] 12872 x := v.Args[0] 12873 y := v.Args[1] 12874 v.reset(OpARM64CSELULT) 12875 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12876 v0.AddArg(x) 12877 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 12878 v1.AddArg(y) 12879 v0.AddArg(v1) 12880 v.AddArg(v0) 12881 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12882 v2.AuxInt = 0 12883 v.AddArg(v2) 12884 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12885 v3.AuxInt = 64 12886 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 12887 v4.AddArg(y) 12888 v3.AddArg(v4) 12889 v.AddArg(v3) 12890 return true 12891 } 12892 } 12893 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 12894 b := v.Block 12895 _ = b 12896 typ := &b.Func.Config.Types 12897 _ = typ 12898 // match: (Lsh32x16 <t> x y) 12899 // cond: 12900 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 12901 for { 12902 t := v.Type 12903 _ = v.Args[1] 12904 x := v.Args[0] 12905 y := v.Args[1] 12906 v.reset(OpARM64CSELULT) 12907 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12908 v0.AddArg(x) 12909 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12910 v1.AddArg(y) 12911 v0.AddArg(v1) 12912 v.AddArg(v0) 12913 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12914 v2.AuxInt = 0 12915 v.AddArg(v2) 12916 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12917 v3.AuxInt = 64 12918 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 12919 v4.AddArg(y) 12920 v3.AddArg(v4) 12921 v.AddArg(v3) 12922 return true 12923 } 12924 } 12925 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 12926 b := v.Block 12927 _ = b 12928 typ := &b.Func.Config.Types 12929 _ = typ 12930 // match: (Lsh32x32 <t> x y) 12931 // cond: 12932 // result: 
(CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 12933 for { 12934 t := v.Type 12935 _ = v.Args[1] 12936 x := v.Args[0] 12937 y := v.Args[1] 12938 v.reset(OpARM64CSELULT) 12939 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12940 v0.AddArg(x) 12941 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12942 v1.AddArg(y) 12943 v0.AddArg(v1) 12944 v.AddArg(v0) 12945 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12946 v2.AuxInt = 0 12947 v.AddArg(v2) 12948 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12949 v3.AuxInt = 64 12950 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 12951 v4.AddArg(y) 12952 v3.AddArg(v4) 12953 v.AddArg(v3) 12954 return true 12955 } 12956 } 12957 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 12958 b := v.Block 12959 _ = b 12960 // match: (Lsh32x64 <t> x y) 12961 // cond: 12962 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 12963 for { 12964 t := v.Type 12965 _ = v.Args[1] 12966 x := v.Args[0] 12967 y := v.Args[1] 12968 v.reset(OpARM64CSELULT) 12969 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12970 v0.AddArg(x) 12971 v0.AddArg(y) 12972 v.AddArg(v0) 12973 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 12974 v1.AuxInt = 0 12975 v.AddArg(v1) 12976 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 12977 v2.AuxInt = 64 12978 v2.AddArg(y) 12979 v.AddArg(v2) 12980 return true 12981 } 12982 } 12983 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 12984 b := v.Block 12985 _ = b 12986 typ := &b.Func.Config.Types 12987 _ = typ 12988 // match: (Lsh32x8 <t> x y) 12989 // cond: 12990 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 12991 for { 12992 t := v.Type 12993 _ = v.Args[1] 12994 x := v.Args[0] 12995 y := v.Args[1] 12996 v.reset(OpARM64CSELULT) 12997 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 12998 v0.AddArg(x) 12999 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13000 v1.AddArg(y) 13001 
v0.AddArg(v1) 13002 v.AddArg(v0) 13003 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13004 v2.AuxInt = 0 13005 v.AddArg(v2) 13006 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13007 v3.AuxInt = 64 13008 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13009 v4.AddArg(y) 13010 v3.AddArg(v4) 13011 v.AddArg(v3) 13012 return true 13013 } 13014 } 13015 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 13016 b := v.Block 13017 _ = b 13018 typ := &b.Func.Config.Types 13019 _ = typ 13020 // match: (Lsh64x16 <t> x y) 13021 // cond: 13022 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13023 for { 13024 t := v.Type 13025 _ = v.Args[1] 13026 x := v.Args[0] 13027 y := v.Args[1] 13028 v.reset(OpARM64CSELULT) 13029 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13030 v0.AddArg(x) 13031 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13032 v1.AddArg(y) 13033 v0.AddArg(v1) 13034 v.AddArg(v0) 13035 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13036 v2.AuxInt = 0 13037 v.AddArg(v2) 13038 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13039 v3.AuxInt = 64 13040 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13041 v4.AddArg(y) 13042 v3.AddArg(v4) 13043 v.AddArg(v3) 13044 return true 13045 } 13046 } 13047 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 13048 b := v.Block 13049 _ = b 13050 typ := &b.Func.Config.Types 13051 _ = typ 13052 // match: (Lsh64x32 <t> x y) 13053 // cond: 13054 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13055 for { 13056 t := v.Type 13057 _ = v.Args[1] 13058 x := v.Args[0] 13059 y := v.Args[1] 13060 v.reset(OpARM64CSELULT) 13061 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13062 v0.AddArg(x) 13063 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13064 v1.AddArg(y) 13065 v0.AddArg(v1) 13066 v.AddArg(v0) 13067 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13068 v2.AuxInt = 0 13069 v.AddArg(v2) 13070 v3 
:= b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13071 v3.AuxInt = 64 13072 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13073 v4.AddArg(y) 13074 v3.AddArg(v4) 13075 v.AddArg(v3) 13076 return true 13077 } 13078 } 13079 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 13080 b := v.Block 13081 _ = b 13082 // match: (Lsh64x64 <t> x y) 13083 // cond: 13084 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 13085 for { 13086 t := v.Type 13087 _ = v.Args[1] 13088 x := v.Args[0] 13089 y := v.Args[1] 13090 v.reset(OpARM64CSELULT) 13091 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13092 v0.AddArg(x) 13093 v0.AddArg(y) 13094 v.AddArg(v0) 13095 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13096 v1.AuxInt = 0 13097 v.AddArg(v1) 13098 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13099 v2.AuxInt = 64 13100 v2.AddArg(y) 13101 v.AddArg(v2) 13102 return true 13103 } 13104 } 13105 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 13106 b := v.Block 13107 _ = b 13108 typ := &b.Func.Config.Types 13109 _ = typ 13110 // match: (Lsh64x8 <t> x y) 13111 // cond: 13112 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13113 for { 13114 t := v.Type 13115 _ = v.Args[1] 13116 x := v.Args[0] 13117 y := v.Args[1] 13118 v.reset(OpARM64CSELULT) 13119 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13120 v0.AddArg(x) 13121 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13122 v1.AddArg(y) 13123 v0.AddArg(v1) 13124 v.AddArg(v0) 13125 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13126 v2.AuxInt = 0 13127 v.AddArg(v2) 13128 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13129 v3.AuxInt = 64 13130 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13131 v4.AddArg(y) 13132 v3.AddArg(v4) 13133 v.AddArg(v3) 13134 return true 13135 } 13136 } 13137 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 13138 b := v.Block 13139 _ = b 13140 typ := &b.Func.Config.Types 13141 _ = typ 13142 // 
match: (Lsh8x16 <t> x y) 13143 // cond: 13144 // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 13145 for { 13146 t := v.Type 13147 _ = v.Args[1] 13148 x := v.Args[0] 13149 y := v.Args[1] 13150 v.reset(OpARM64CSELULT) 13151 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13152 v0.AddArg(x) 13153 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13154 v1.AddArg(y) 13155 v0.AddArg(v1) 13156 v.AddArg(v0) 13157 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13158 v2.AuxInt = 0 13159 v.AddArg(v2) 13160 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13161 v3.AuxInt = 64 13162 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 13163 v4.AddArg(y) 13164 v3.AddArg(v4) 13165 v.AddArg(v3) 13166 return true 13167 } 13168 } 13169 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 13170 b := v.Block 13171 _ = b 13172 typ := &b.Func.Config.Types 13173 _ = typ 13174 // match: (Lsh8x32 <t> x y) 13175 // cond: 13176 // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 13177 for { 13178 t := v.Type 13179 _ = v.Args[1] 13180 x := v.Args[0] 13181 y := v.Args[1] 13182 v.reset(OpARM64CSELULT) 13183 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13184 v0.AddArg(x) 13185 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13186 v1.AddArg(y) 13187 v0.AddArg(v1) 13188 v.AddArg(v0) 13189 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13190 v2.AuxInt = 0 13191 v.AddArg(v2) 13192 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13193 v3.AuxInt = 64 13194 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 13195 v4.AddArg(y) 13196 v3.AddArg(v4) 13197 v.AddArg(v3) 13198 return true 13199 } 13200 } 13201 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool { 13202 b := v.Block 13203 _ = b 13204 // match: (Lsh8x64 <t> x y) 13205 // cond: 13206 // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 13207 for { 13208 t := v.Type 13209 _ = v.Args[1] 13210 x := 
v.Args[0] 13211 y := v.Args[1] 13212 v.reset(OpARM64CSELULT) 13213 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13214 v0.AddArg(x) 13215 v0.AddArg(y) 13216 v.AddArg(v0) 13217 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13218 v1.AuxInt = 0 13219 v.AddArg(v1) 13220 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13221 v2.AuxInt = 64 13222 v2.AddArg(y) 13223 v.AddArg(v2) 13224 return true 13225 } 13226 } 13227 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool { 13228 b := v.Block 13229 _ = b 13230 typ := &b.Func.Config.Types 13231 _ = typ 13232 // match: (Lsh8x8 <t> x y) 13233 // cond: 13234 // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 13235 for { 13236 t := v.Type 13237 _ = v.Args[1] 13238 x := v.Args[0] 13239 y := v.Args[1] 13240 v.reset(OpARM64CSELULT) 13241 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 13242 v0.AddArg(x) 13243 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13244 v1.AddArg(y) 13245 v0.AddArg(v1) 13246 v.AddArg(v0) 13247 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 13248 v2.AuxInt = 0 13249 v.AddArg(v2) 13250 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 13251 v3.AuxInt = 64 13252 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 13253 v4.AddArg(y) 13254 v3.AddArg(v4) 13255 v.AddArg(v3) 13256 return true 13257 } 13258 } 13259 func rewriteValueARM64_OpMod16_0(v *Value) bool { 13260 b := v.Block 13261 _ = b 13262 typ := &b.Func.Config.Types 13263 _ = typ 13264 // match: (Mod16 x y) 13265 // cond: 13266 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 13267 for { 13268 _ = v.Args[1] 13269 x := v.Args[0] 13270 y := v.Args[1] 13271 v.reset(OpARM64MODW) 13272 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13273 v0.AddArg(x) 13274 v.AddArg(v0) 13275 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 13276 v1.AddArg(y) 13277 v.AddArg(v1) 13278 return true 13279 } 13280 } 13281 func rewriteValueARM64_OpMod16u_0(v *Value) bool { 13282 b := v.Block 13283 _ = b 13284 
typ := &b.Func.Config.Types 13285 _ = typ 13286 // match: (Mod16u x y) 13287 // cond: 13288 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 13289 for { 13290 _ = v.Args[1] 13291 x := v.Args[0] 13292 y := v.Args[1] 13293 v.reset(OpARM64UMODW) 13294 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13295 v0.AddArg(x) 13296 v.AddArg(v0) 13297 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13298 v1.AddArg(y) 13299 v.AddArg(v1) 13300 return true 13301 } 13302 } 13303 func rewriteValueARM64_OpMod32_0(v *Value) bool { 13304 // match: (Mod32 x y) 13305 // cond: 13306 // result: (MODW x y) 13307 for { 13308 _ = v.Args[1] 13309 x := v.Args[0] 13310 y := v.Args[1] 13311 v.reset(OpARM64MODW) 13312 v.AddArg(x) 13313 v.AddArg(y) 13314 return true 13315 } 13316 } 13317 func rewriteValueARM64_OpMod32u_0(v *Value) bool { 13318 // match: (Mod32u x y) 13319 // cond: 13320 // result: (UMODW x y) 13321 for { 13322 _ = v.Args[1] 13323 x := v.Args[0] 13324 y := v.Args[1] 13325 v.reset(OpARM64UMODW) 13326 v.AddArg(x) 13327 v.AddArg(y) 13328 return true 13329 } 13330 } 13331 func rewriteValueARM64_OpMod64_0(v *Value) bool { 13332 // match: (Mod64 x y) 13333 // cond: 13334 // result: (MOD x y) 13335 for { 13336 _ = v.Args[1] 13337 x := v.Args[0] 13338 y := v.Args[1] 13339 v.reset(OpARM64MOD) 13340 v.AddArg(x) 13341 v.AddArg(y) 13342 return true 13343 } 13344 } 13345 func rewriteValueARM64_OpMod64u_0(v *Value) bool { 13346 // match: (Mod64u x y) 13347 // cond: 13348 // result: (UMOD x y) 13349 for { 13350 _ = v.Args[1] 13351 x := v.Args[0] 13352 y := v.Args[1] 13353 v.reset(OpARM64UMOD) 13354 v.AddArg(x) 13355 v.AddArg(y) 13356 return true 13357 } 13358 } 13359 func rewriteValueARM64_OpMod8_0(v *Value) bool { 13360 b := v.Block 13361 _ = b 13362 typ := &b.Func.Config.Types 13363 _ = typ 13364 // match: (Mod8 x y) 13365 // cond: 13366 // result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 13367 for { 13368 _ = v.Args[1] 13369 x := v.Args[0] 13370 y := v.Args[1] 13371 
v.reset(OpARM64MODW) 13372 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13373 v0.AddArg(x) 13374 v.AddArg(v0) 13375 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 13376 v1.AddArg(y) 13377 v.AddArg(v1) 13378 return true 13379 } 13380 } 13381 func rewriteValueARM64_OpMod8u_0(v *Value) bool { 13382 b := v.Block 13383 _ = b 13384 typ := &b.Func.Config.Types 13385 _ = typ 13386 // match: (Mod8u x y) 13387 // cond: 13388 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 13389 for { 13390 _ = v.Args[1] 13391 x := v.Args[0] 13392 y := v.Args[1] 13393 v.reset(OpARM64UMODW) 13394 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13395 v0.AddArg(x) 13396 v.AddArg(v0) 13397 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 13398 v1.AddArg(y) 13399 v.AddArg(v1) 13400 return true 13401 } 13402 } 13403 func rewriteValueARM64_OpMove_0(v *Value) bool { 13404 b := v.Block 13405 _ = b 13406 typ := &b.Func.Config.Types 13407 _ = typ 13408 // match: (Move [0] _ _ mem) 13409 // cond: 13410 // result: mem 13411 for { 13412 if v.AuxInt != 0 { 13413 break 13414 } 13415 _ = v.Args[2] 13416 mem := v.Args[2] 13417 v.reset(OpCopy) 13418 v.Type = mem.Type 13419 v.AddArg(mem) 13420 return true 13421 } 13422 // match: (Move [1] dst src mem) 13423 // cond: 13424 // result: (MOVBstore dst (MOVBUload src mem) mem) 13425 for { 13426 if v.AuxInt != 1 { 13427 break 13428 } 13429 _ = v.Args[2] 13430 dst := v.Args[0] 13431 src := v.Args[1] 13432 mem := v.Args[2] 13433 v.reset(OpARM64MOVBstore) 13434 v.AddArg(dst) 13435 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13436 v0.AddArg(src) 13437 v0.AddArg(mem) 13438 v.AddArg(v0) 13439 v.AddArg(mem) 13440 return true 13441 } 13442 // match: (Move [2] dst src mem) 13443 // cond: 13444 // result: (MOVHstore dst (MOVHUload src mem) mem) 13445 for { 13446 if v.AuxInt != 2 { 13447 break 13448 } 13449 _ = v.Args[2] 13450 dst := v.Args[0] 13451 src := v.Args[1] 13452 mem := v.Args[2] 13453 v.reset(OpARM64MOVHstore) 13454 v.AddArg(dst) 
13455 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13456 v0.AddArg(src) 13457 v0.AddArg(mem) 13458 v.AddArg(v0) 13459 v.AddArg(mem) 13460 return true 13461 } 13462 // match: (Move [4] dst src mem) 13463 // cond: 13464 // result: (MOVWstore dst (MOVWUload src mem) mem) 13465 for { 13466 if v.AuxInt != 4 { 13467 break 13468 } 13469 _ = v.Args[2] 13470 dst := v.Args[0] 13471 src := v.Args[1] 13472 mem := v.Args[2] 13473 v.reset(OpARM64MOVWstore) 13474 v.AddArg(dst) 13475 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13476 v0.AddArg(src) 13477 v0.AddArg(mem) 13478 v.AddArg(v0) 13479 v.AddArg(mem) 13480 return true 13481 } 13482 // match: (Move [8] dst src mem) 13483 // cond: 13484 // result: (MOVDstore dst (MOVDload src mem) mem) 13485 for { 13486 if v.AuxInt != 8 { 13487 break 13488 } 13489 _ = v.Args[2] 13490 dst := v.Args[0] 13491 src := v.Args[1] 13492 mem := v.Args[2] 13493 v.reset(OpARM64MOVDstore) 13494 v.AddArg(dst) 13495 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13496 v0.AddArg(src) 13497 v0.AddArg(mem) 13498 v.AddArg(v0) 13499 v.AddArg(mem) 13500 return true 13501 } 13502 // match: (Move [3] dst src mem) 13503 // cond: 13504 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 13505 for { 13506 if v.AuxInt != 3 { 13507 break 13508 } 13509 _ = v.Args[2] 13510 dst := v.Args[0] 13511 src := v.Args[1] 13512 mem := v.Args[2] 13513 v.reset(OpARM64MOVBstore) 13514 v.AuxInt = 2 13515 v.AddArg(dst) 13516 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13517 v0.AuxInt = 2 13518 v0.AddArg(src) 13519 v0.AddArg(mem) 13520 v.AddArg(v0) 13521 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 13522 v1.AddArg(dst) 13523 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13524 v2.AddArg(src) 13525 v2.AddArg(mem) 13526 v1.AddArg(v2) 13527 v1.AddArg(mem) 13528 v.AddArg(v1) 13529 return true 13530 } 13531 // match: (Move [5] dst src mem) 13532 // cond: 13533 // result: (MOVBstore [4] dst 
(MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 13534 for { 13535 if v.AuxInt != 5 { 13536 break 13537 } 13538 _ = v.Args[2] 13539 dst := v.Args[0] 13540 src := v.Args[1] 13541 mem := v.Args[2] 13542 v.reset(OpARM64MOVBstore) 13543 v.AuxInt = 4 13544 v.AddArg(dst) 13545 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13546 v0.AuxInt = 4 13547 v0.AddArg(src) 13548 v0.AddArg(mem) 13549 v.AddArg(v0) 13550 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13551 v1.AddArg(dst) 13552 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13553 v2.AddArg(src) 13554 v2.AddArg(mem) 13555 v1.AddArg(v2) 13556 v1.AddArg(mem) 13557 v.AddArg(v1) 13558 return true 13559 } 13560 // match: (Move [6] dst src mem) 13561 // cond: 13562 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 13563 for { 13564 if v.AuxInt != 6 { 13565 break 13566 } 13567 _ = v.Args[2] 13568 dst := v.Args[0] 13569 src := v.Args[1] 13570 mem := v.Args[2] 13571 v.reset(OpARM64MOVHstore) 13572 v.AuxInt = 4 13573 v.AddArg(dst) 13574 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13575 v0.AuxInt = 4 13576 v0.AddArg(src) 13577 v0.AddArg(mem) 13578 v.AddArg(v0) 13579 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13580 v1.AddArg(dst) 13581 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13582 v2.AddArg(src) 13583 v2.AddArg(mem) 13584 v1.AddArg(v2) 13585 v1.AddArg(mem) 13586 v.AddArg(v1) 13587 return true 13588 } 13589 // match: (Move [7] dst src mem) 13590 // cond: 13591 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 13592 for { 13593 if v.AuxInt != 7 { 13594 break 13595 } 13596 _ = v.Args[2] 13597 dst := v.Args[0] 13598 src := v.Args[1] 13599 mem := v.Args[2] 13600 v.reset(OpARM64MOVBstore) 13601 v.AuxInt = 6 13602 v.AddArg(dst) 13603 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 13604 v0.AuxInt = 6 13605 
v0.AddArg(src) 13606 v0.AddArg(mem) 13607 v.AddArg(v0) 13608 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 13609 v1.AuxInt = 4 13610 v1.AddArg(dst) 13611 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 13612 v2.AuxInt = 4 13613 v2.AddArg(src) 13614 v2.AddArg(mem) 13615 v1.AddArg(v2) 13616 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 13617 v3.AddArg(dst) 13618 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13619 v4.AddArg(src) 13620 v4.AddArg(mem) 13621 v3.AddArg(v4) 13622 v3.AddArg(mem) 13623 v1.AddArg(v3) 13624 v.AddArg(v1) 13625 return true 13626 } 13627 // match: (Move [12] dst src mem) 13628 // cond: 13629 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 13630 for { 13631 if v.AuxInt != 12 { 13632 break 13633 } 13634 _ = v.Args[2] 13635 dst := v.Args[0] 13636 src := v.Args[1] 13637 mem := v.Args[2] 13638 v.reset(OpARM64MOVWstore) 13639 v.AuxInt = 8 13640 v.AddArg(dst) 13641 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 13642 v0.AuxInt = 8 13643 v0.AddArg(src) 13644 v0.AddArg(mem) 13645 v.AddArg(v0) 13646 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 13647 v1.AddArg(dst) 13648 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13649 v2.AddArg(src) 13650 v2.AddArg(mem) 13651 v1.AddArg(v2) 13652 v1.AddArg(mem) 13653 v.AddArg(v1) 13654 return true 13655 } 13656 return false 13657 } 13658 func rewriteValueARM64_OpMove_10(v *Value) bool { 13659 b := v.Block 13660 _ = b 13661 config := b.Func.Config 13662 _ = config 13663 typ := &b.Func.Config.Types 13664 _ = typ 13665 // match: (Move [16] dst src mem) 13666 // cond: 13667 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 13668 for { 13669 if v.AuxInt != 16 { 13670 break 13671 } 13672 _ = v.Args[2] 13673 dst := v.Args[0] 13674 src := v.Args[1] 13675 mem := v.Args[2] 13676 v.reset(OpARM64MOVDstore) 13677 v.AuxInt = 8 13678 v.AddArg(dst) 13679 v0 := b.NewValue0(v.Pos, 
OpARM64MOVDload, typ.UInt64) 13680 v0.AuxInt = 8 13681 v0.AddArg(src) 13682 v0.AddArg(mem) 13683 v.AddArg(v0) 13684 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 13685 v1.AddArg(dst) 13686 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13687 v2.AddArg(src) 13688 v2.AddArg(mem) 13689 v1.AddArg(v2) 13690 v1.AddArg(mem) 13691 v.AddArg(v1) 13692 return true 13693 } 13694 // match: (Move [24] dst src mem) 13695 // cond: 13696 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) 13697 for { 13698 if v.AuxInt != 24 { 13699 break 13700 } 13701 _ = v.Args[2] 13702 dst := v.Args[0] 13703 src := v.Args[1] 13704 mem := v.Args[2] 13705 v.reset(OpARM64MOVDstore) 13706 v.AuxInt = 16 13707 v.AddArg(dst) 13708 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13709 v0.AuxInt = 16 13710 v0.AddArg(src) 13711 v0.AddArg(mem) 13712 v.AddArg(v0) 13713 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 13714 v1.AuxInt = 8 13715 v1.AddArg(dst) 13716 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13717 v2.AuxInt = 8 13718 v2.AddArg(src) 13719 v2.AddArg(mem) 13720 v1.AddArg(v2) 13721 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 13722 v3.AddArg(dst) 13723 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 13724 v4.AddArg(src) 13725 v4.AddArg(mem) 13726 v3.AddArg(v4) 13727 v3.AddArg(mem) 13728 v1.AddArg(v3) 13729 v.AddArg(v1) 13730 return true 13731 } 13732 // match: (Move [s] dst src mem) 13733 // cond: s%8 != 0 && s > 8 13734 // result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem)) 13735 for { 13736 s := v.AuxInt 13737 _ = v.Args[2] 13738 dst := v.Args[0] 13739 src := v.Args[1] 13740 mem := v.Args[2] 13741 if !(s%8 != 0 && s > 8) { 13742 break 13743 } 13744 v.reset(OpMove) 13745 v.AuxInt = s % 8 13746 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 13747 v0.AuxInt = s - s%8 13748 v0.AddArg(dst) 13749 
v.AddArg(v0) 13750 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 13751 v1.AuxInt = s - s%8 13752 v1.AddArg(src) 13753 v.AddArg(v1) 13754 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem) 13755 v2.AuxInt = s - s%8 13756 v2.AddArg(dst) 13757 v2.AddArg(src) 13758 v2.AddArg(mem) 13759 v.AddArg(v2) 13760 return true 13761 } 13762 // match: (Move [s] dst src mem) 13763 // cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice 13764 // result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem) 13765 for { 13766 s := v.AuxInt 13767 _ = v.Args[2] 13768 dst := v.Args[0] 13769 src := v.Args[1] 13770 mem := v.Args[2] 13771 if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) { 13772 break 13773 } 13774 v.reset(OpARM64DUFFCOPY) 13775 v.AuxInt = 8 * (128 - int64(s/8)) 13776 v.AddArg(dst) 13777 v.AddArg(src) 13778 v.AddArg(mem) 13779 return true 13780 } 13781 // match: (Move [s] dst src mem) 13782 // cond: s > 24 && s%8 == 0 13783 // result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem) 13784 for { 13785 s := v.AuxInt 13786 _ = v.Args[2] 13787 dst := v.Args[0] 13788 src := v.Args[1] 13789 mem := v.Args[2] 13790 if !(s > 24 && s%8 == 0) { 13791 break 13792 } 13793 v.reset(OpARM64LoweredMove) 13794 v.AddArg(dst) 13795 v.AddArg(src) 13796 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 13797 v0.AuxInt = s - 8 13798 v0.AddArg(src) 13799 v.AddArg(v0) 13800 v.AddArg(mem) 13801 return true 13802 } 13803 return false 13804 } 13805 func rewriteValueARM64_OpMul16_0(v *Value) bool { 13806 // match: (Mul16 x y) 13807 // cond: 13808 // result: (MULW x y) 13809 for { 13810 _ = v.Args[1] 13811 x := v.Args[0] 13812 y := v.Args[1] 13813 v.reset(OpARM64MULW) 13814 v.AddArg(x) 13815 v.AddArg(y) 13816 return true 13817 } 13818 } 13819 func rewriteValueARM64_OpMul32_0(v *Value) bool { 13820 // match: (Mul32 x y) 13821 // cond: 13822 // result: (MULW x y) 13823 for { 13824 _ = v.Args[1] 13825 x := v.Args[0] 13826 y := v.Args[1] 13827 v.reset(OpARM64MULW) 13828 v.AddArg(x) 
13829 v.AddArg(y) 13830 return true 13831 } 13832 } 13833 func rewriteValueARM64_OpMul32F_0(v *Value) bool { 13834 // match: (Mul32F x y) 13835 // cond: 13836 // result: (FMULS x y) 13837 for { 13838 _ = v.Args[1] 13839 x := v.Args[0] 13840 y := v.Args[1] 13841 v.reset(OpARM64FMULS) 13842 v.AddArg(x) 13843 v.AddArg(y) 13844 return true 13845 } 13846 } 13847 func rewriteValueARM64_OpMul64_0(v *Value) bool { 13848 // match: (Mul64 x y) 13849 // cond: 13850 // result: (MUL x y) 13851 for { 13852 _ = v.Args[1] 13853 x := v.Args[0] 13854 y := v.Args[1] 13855 v.reset(OpARM64MUL) 13856 v.AddArg(x) 13857 v.AddArg(y) 13858 return true 13859 } 13860 } 13861 func rewriteValueARM64_OpMul64F_0(v *Value) bool { 13862 // match: (Mul64F x y) 13863 // cond: 13864 // result: (FMULD x y) 13865 for { 13866 _ = v.Args[1] 13867 x := v.Args[0] 13868 y := v.Args[1] 13869 v.reset(OpARM64FMULD) 13870 v.AddArg(x) 13871 v.AddArg(y) 13872 return true 13873 } 13874 } 13875 func rewriteValueARM64_OpMul8_0(v *Value) bool { 13876 // match: (Mul8 x y) 13877 // cond: 13878 // result: (MULW x y) 13879 for { 13880 _ = v.Args[1] 13881 x := v.Args[0] 13882 y := v.Args[1] 13883 v.reset(OpARM64MULW) 13884 v.AddArg(x) 13885 v.AddArg(y) 13886 return true 13887 } 13888 } 13889 func rewriteValueARM64_OpNeg16_0(v *Value) bool { 13890 // match: (Neg16 x) 13891 // cond: 13892 // result: (NEG x) 13893 for { 13894 x := v.Args[0] 13895 v.reset(OpARM64NEG) 13896 v.AddArg(x) 13897 return true 13898 } 13899 } 13900 func rewriteValueARM64_OpNeg32_0(v *Value) bool { 13901 // match: (Neg32 x) 13902 // cond: 13903 // result: (NEG x) 13904 for { 13905 x := v.Args[0] 13906 v.reset(OpARM64NEG) 13907 v.AddArg(x) 13908 return true 13909 } 13910 } 13911 func rewriteValueARM64_OpNeg32F_0(v *Value) bool { 13912 // match: (Neg32F x) 13913 // cond: 13914 // result: (FNEGS x) 13915 for { 13916 x := v.Args[0] 13917 v.reset(OpARM64FNEGS) 13918 v.AddArg(x) 13919 return true 13920 } 13921 } 13922 func rewriteValueARM64_OpNeg64_0(v 
*Value) bool { 13923 // match: (Neg64 x) 13924 // cond: 13925 // result: (NEG x) 13926 for { 13927 x := v.Args[0] 13928 v.reset(OpARM64NEG) 13929 v.AddArg(x) 13930 return true 13931 } 13932 } 13933 func rewriteValueARM64_OpNeg64F_0(v *Value) bool { 13934 // match: (Neg64F x) 13935 // cond: 13936 // result: (FNEGD x) 13937 for { 13938 x := v.Args[0] 13939 v.reset(OpARM64FNEGD) 13940 v.AddArg(x) 13941 return true 13942 } 13943 } 13944 func rewriteValueARM64_OpNeg8_0(v *Value) bool { 13945 // match: (Neg8 x) 13946 // cond: 13947 // result: (NEG x) 13948 for { 13949 x := v.Args[0] 13950 v.reset(OpARM64NEG) 13951 v.AddArg(x) 13952 return true 13953 } 13954 } 13955 func rewriteValueARM64_OpNeq16_0(v *Value) bool { 13956 b := v.Block 13957 _ = b 13958 typ := &b.Func.Config.Types 13959 _ = typ 13960 // match: (Neq16 x y) 13961 // cond: 13962 // result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 13963 for { 13964 _ = v.Args[1] 13965 x := v.Args[0] 13966 y := v.Args[1] 13967 v.reset(OpARM64NotEqual) 13968 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13969 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13970 v1.AddArg(x) 13971 v0.AddArg(v1) 13972 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 13973 v2.AddArg(y) 13974 v0.AddArg(v2) 13975 v.AddArg(v0) 13976 return true 13977 } 13978 } 13979 func rewriteValueARM64_OpNeq32_0(v *Value) bool { 13980 b := v.Block 13981 _ = b 13982 // match: (Neq32 x y) 13983 // cond: 13984 // result: (NotEqual (CMPW x y)) 13985 for { 13986 _ = v.Args[1] 13987 x := v.Args[0] 13988 y := v.Args[1] 13989 v.reset(OpARM64NotEqual) 13990 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 13991 v0.AddArg(x) 13992 v0.AddArg(y) 13993 v.AddArg(v0) 13994 return true 13995 } 13996 } 13997 func rewriteValueARM64_OpNeq32F_0(v *Value) bool { 13998 b := v.Block 13999 _ = b 14000 // match: (Neq32F x y) 14001 // cond: 14002 // result: (NotEqual (FCMPS x y)) 14003 for { 14004 _ = v.Args[1] 14005 x := v.Args[0] 14006 y := v.Args[1] 
14007 v.reset(OpARM64NotEqual) 14008 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 14009 v0.AddArg(x) 14010 v0.AddArg(y) 14011 v.AddArg(v0) 14012 return true 14013 } 14014 } 14015 func rewriteValueARM64_OpNeq64_0(v *Value) bool { 14016 b := v.Block 14017 _ = b 14018 // match: (Neq64 x y) 14019 // cond: 14020 // result: (NotEqual (CMP x y)) 14021 for { 14022 _ = v.Args[1] 14023 x := v.Args[0] 14024 y := v.Args[1] 14025 v.reset(OpARM64NotEqual) 14026 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14027 v0.AddArg(x) 14028 v0.AddArg(y) 14029 v.AddArg(v0) 14030 return true 14031 } 14032 } 14033 func rewriteValueARM64_OpNeq64F_0(v *Value) bool { 14034 b := v.Block 14035 _ = b 14036 // match: (Neq64F x y) 14037 // cond: 14038 // result: (NotEqual (FCMPD x y)) 14039 for { 14040 _ = v.Args[1] 14041 x := v.Args[0] 14042 y := v.Args[1] 14043 v.reset(OpARM64NotEqual) 14044 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 14045 v0.AddArg(x) 14046 v0.AddArg(y) 14047 v.AddArg(v0) 14048 return true 14049 } 14050 } 14051 func rewriteValueARM64_OpNeq8_0(v *Value) bool { 14052 b := v.Block 14053 _ = b 14054 typ := &b.Func.Config.Types 14055 _ = typ 14056 // match: (Neq8 x y) 14057 // cond: 14058 // result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 14059 for { 14060 _ = v.Args[1] 14061 x := v.Args[0] 14062 y := v.Args[1] 14063 v.reset(OpARM64NotEqual) 14064 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 14065 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14066 v1.AddArg(x) 14067 v0.AddArg(v1) 14068 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 14069 v2.AddArg(y) 14070 v0.AddArg(v2) 14071 v.AddArg(v0) 14072 return true 14073 } 14074 } 14075 func rewriteValueARM64_OpNeqB_0(v *Value) bool { 14076 // match: (NeqB x y) 14077 // cond: 14078 // result: (XOR x y) 14079 for { 14080 _ = v.Args[1] 14081 x := v.Args[0] 14082 y := v.Args[1] 14083 v.reset(OpARM64XOR) 14084 v.AddArg(x) 14085 v.AddArg(y) 14086 return true 14087 } 14088 } 14089 
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool { 14090 b := v.Block 14091 _ = b 14092 // match: (NeqPtr x y) 14093 // cond: 14094 // result: (NotEqual (CMP x y)) 14095 for { 14096 _ = v.Args[1] 14097 x := v.Args[0] 14098 y := v.Args[1] 14099 v.reset(OpARM64NotEqual) 14100 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 14101 v0.AddArg(x) 14102 v0.AddArg(y) 14103 v.AddArg(v0) 14104 return true 14105 } 14106 } 14107 func rewriteValueARM64_OpNilCheck_0(v *Value) bool { 14108 // match: (NilCheck ptr mem) 14109 // cond: 14110 // result: (LoweredNilCheck ptr mem) 14111 for { 14112 _ = v.Args[1] 14113 ptr := v.Args[0] 14114 mem := v.Args[1] 14115 v.reset(OpARM64LoweredNilCheck) 14116 v.AddArg(ptr) 14117 v.AddArg(mem) 14118 return true 14119 } 14120 } 14121 func rewriteValueARM64_OpNot_0(v *Value) bool { 14122 b := v.Block 14123 _ = b 14124 typ := &b.Func.Config.Types 14125 _ = typ 14126 // match: (Not x) 14127 // cond: 14128 // result: (XOR (MOVDconst [1]) x) 14129 for { 14130 x := v.Args[0] 14131 v.reset(OpARM64XOR) 14132 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 14133 v0.AuxInt = 1 14134 v.AddArg(v0) 14135 v.AddArg(x) 14136 return true 14137 } 14138 } 14139 func rewriteValueARM64_OpOffPtr_0(v *Value) bool { 14140 // match: (OffPtr [off] ptr:(SP)) 14141 // cond: 14142 // result: (MOVDaddr [off] ptr) 14143 for { 14144 off := v.AuxInt 14145 ptr := v.Args[0] 14146 if ptr.Op != OpSP { 14147 break 14148 } 14149 v.reset(OpARM64MOVDaddr) 14150 v.AuxInt = off 14151 v.AddArg(ptr) 14152 return true 14153 } 14154 // match: (OffPtr [off] ptr) 14155 // cond: 14156 // result: (ADDconst [off] ptr) 14157 for { 14158 off := v.AuxInt 14159 ptr := v.Args[0] 14160 v.reset(OpARM64ADDconst) 14161 v.AuxInt = off 14162 v.AddArg(ptr) 14163 return true 14164 } 14165 } 14166 func rewriteValueARM64_OpOr16_0(v *Value) bool { 14167 // match: (Or16 x y) 14168 // cond: 14169 // result: (OR x y) 14170 for { 14171 _ = v.Args[1] 14172 x := v.Args[0] 14173 y := v.Args[1] 14174 
v.reset(OpARM64OR) 14175 v.AddArg(x) 14176 v.AddArg(y) 14177 return true 14178 } 14179 } 14180 func rewriteValueARM64_OpOr32_0(v *Value) bool { 14181 // match: (Or32 x y) 14182 // cond: 14183 // result: (OR x y) 14184 for { 14185 _ = v.Args[1] 14186 x := v.Args[0] 14187 y := v.Args[1] 14188 v.reset(OpARM64OR) 14189 v.AddArg(x) 14190 v.AddArg(y) 14191 return true 14192 } 14193 } 14194 func rewriteValueARM64_OpOr64_0(v *Value) bool { 14195 // match: (Or64 x y) 14196 // cond: 14197 // result: (OR x y) 14198 for { 14199 _ = v.Args[1] 14200 x := v.Args[0] 14201 y := v.Args[1] 14202 v.reset(OpARM64OR) 14203 v.AddArg(x) 14204 v.AddArg(y) 14205 return true 14206 } 14207 } 14208 func rewriteValueARM64_OpOr8_0(v *Value) bool { 14209 // match: (Or8 x y) 14210 // cond: 14211 // result: (OR x y) 14212 for { 14213 _ = v.Args[1] 14214 x := v.Args[0] 14215 y := v.Args[1] 14216 v.reset(OpARM64OR) 14217 v.AddArg(x) 14218 v.AddArg(y) 14219 return true 14220 } 14221 } 14222 func rewriteValueARM64_OpOrB_0(v *Value) bool { 14223 // match: (OrB x y) 14224 // cond: 14225 // result: (OR x y) 14226 for { 14227 _ = v.Args[1] 14228 x := v.Args[0] 14229 y := v.Args[1] 14230 v.reset(OpARM64OR) 14231 v.AddArg(x) 14232 v.AddArg(y) 14233 return true 14234 } 14235 } 14236 func rewriteValueARM64_OpRound32F_0(v *Value) bool { 14237 // match: (Round32F x) 14238 // cond: 14239 // result: x 14240 for { 14241 x := v.Args[0] 14242 v.reset(OpCopy) 14243 v.Type = x.Type 14244 v.AddArg(x) 14245 return true 14246 } 14247 } 14248 func rewriteValueARM64_OpRound64F_0(v *Value) bool { 14249 // match: (Round64F x) 14250 // cond: 14251 // result: x 14252 for { 14253 x := v.Args[0] 14254 v.reset(OpCopy) 14255 v.Type = x.Type 14256 v.AddArg(x) 14257 return true 14258 } 14259 } 14260 func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool { 14261 b := v.Block 14262 _ = b 14263 typ := &b.Func.Config.Types 14264 _ = typ 14265 // match: (Rsh16Ux16 <t> x y) 14266 // cond: 14267 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) 
(ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14268 for { 14269 t := v.Type 14270 _ = v.Args[1] 14271 x := v.Args[0] 14272 y := v.Args[1] 14273 v.reset(OpARM64CSELULT) 14274 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14275 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14276 v1.AddArg(x) 14277 v0.AddArg(v1) 14278 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14279 v2.AddArg(y) 14280 v0.AddArg(v2) 14281 v.AddArg(v0) 14282 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14283 v3.AuxInt = 0 14284 v.AddArg(v3) 14285 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14286 v4.AuxInt = 64 14287 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14288 v5.AddArg(y) 14289 v4.AddArg(v5) 14290 v.AddArg(v4) 14291 return true 14292 } 14293 } 14294 func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool { 14295 b := v.Block 14296 _ = b 14297 typ := &b.Func.Config.Types 14298 _ = typ 14299 // match: (Rsh16Ux32 <t> x y) 14300 // cond: 14301 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14302 for { 14303 t := v.Type 14304 _ = v.Args[1] 14305 x := v.Args[0] 14306 y := v.Args[1] 14307 v.reset(OpARM64CSELULT) 14308 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14309 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14310 v1.AddArg(x) 14311 v0.AddArg(v1) 14312 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14313 v2.AddArg(y) 14314 v0.AddArg(v2) 14315 v.AddArg(v0) 14316 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14317 v3.AuxInt = 0 14318 v.AddArg(v3) 14319 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14320 v4.AuxInt = 64 14321 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14322 v5.AddArg(y) 14323 v4.AddArg(v5) 14324 v.AddArg(v4) 14325 return true 14326 } 14327 } 14328 func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool { 14329 b := v.Block 14330 _ = b 14331 typ := &b.Func.Config.Types 14332 _ = typ 14333 // match: (Rsh16Ux64 <t> x y) 14334 
// cond: 14335 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y)) 14336 for { 14337 t := v.Type 14338 _ = v.Args[1] 14339 x := v.Args[0] 14340 y := v.Args[1] 14341 v.reset(OpARM64CSELULT) 14342 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14343 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14344 v1.AddArg(x) 14345 v0.AddArg(v1) 14346 v0.AddArg(y) 14347 v.AddArg(v0) 14348 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14349 v2.AuxInt = 0 14350 v.AddArg(v2) 14351 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14352 v3.AuxInt = 64 14353 v3.AddArg(y) 14354 v.AddArg(v3) 14355 return true 14356 } 14357 } 14358 func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool { 14359 b := v.Block 14360 _ = b 14361 typ := &b.Func.Config.Types 14362 _ = typ 14363 // match: (Rsh16Ux8 <t> x y) 14364 // cond: 14365 // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14366 for { 14367 t := v.Type 14368 _ = v.Args[1] 14369 x := v.Args[0] 14370 y := v.Args[1] 14371 v.reset(OpARM64CSELULT) 14372 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14373 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14374 v1.AddArg(x) 14375 v0.AddArg(v1) 14376 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14377 v2.AddArg(y) 14378 v0.AddArg(v2) 14379 v.AddArg(v0) 14380 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14381 v3.AuxInt = 0 14382 v.AddArg(v3) 14383 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14384 v4.AuxInt = 64 14385 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14386 v5.AddArg(y) 14387 v4.AddArg(v5) 14388 v.AddArg(v4) 14389 return true 14390 } 14391 } 14392 func rewriteValueARM64_OpRsh16x16_0(v *Value) bool { 14393 b := v.Block 14394 _ = b 14395 typ := &b.Func.Config.Types 14396 _ = typ 14397 // match: (Rsh16x16 x y) 14398 // cond: 14399 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 
y)))) 14400 for { 14401 _ = v.Args[1] 14402 x := v.Args[0] 14403 y := v.Args[1] 14404 v.reset(OpARM64SRA) 14405 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 14406 v0.AddArg(x) 14407 v.AddArg(v0) 14408 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14409 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14410 v2.AddArg(y) 14411 v1.AddArg(v2) 14412 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14413 v3.AuxInt = 63 14414 v1.AddArg(v3) 14415 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14416 v4.AuxInt = 64 14417 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14418 v5.AddArg(y) 14419 v4.AddArg(v5) 14420 v1.AddArg(v4) 14421 v.AddArg(v1) 14422 return true 14423 } 14424 } 14425 func rewriteValueARM64_OpRsh16x32_0(v *Value) bool { 14426 b := v.Block 14427 _ = b 14428 typ := &b.Func.Config.Types 14429 _ = typ 14430 // match: (Rsh16x32 x y) 14431 // cond: 14432 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14433 for { 14434 _ = v.Args[1] 14435 x := v.Args[0] 14436 y := v.Args[1] 14437 v.reset(OpARM64SRA) 14438 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 14439 v0.AddArg(x) 14440 v.AddArg(v0) 14441 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14442 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14443 v2.AddArg(y) 14444 v1.AddArg(v2) 14445 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14446 v3.AuxInt = 63 14447 v1.AddArg(v3) 14448 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14449 v4.AuxInt = 64 14450 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14451 v5.AddArg(y) 14452 v4.AddArg(v5) 14453 v1.AddArg(v4) 14454 v.AddArg(v1) 14455 return true 14456 } 14457 } 14458 func rewriteValueARM64_OpRsh16x64_0(v *Value) bool { 14459 b := v.Block 14460 _ = b 14461 typ := &b.Func.Config.Types 14462 _ = typ 14463 // match: (Rsh16x64 x y) 14464 // cond: 14465 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst 
<y.Type> [63]) (CMPconst [64] y))) 14466 for { 14467 _ = v.Args[1] 14468 x := v.Args[0] 14469 y := v.Args[1] 14470 v.reset(OpARM64SRA) 14471 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 14472 v0.AddArg(x) 14473 v.AddArg(v0) 14474 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14475 v1.AddArg(y) 14476 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14477 v2.AuxInt = 63 14478 v1.AddArg(v2) 14479 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14480 v3.AuxInt = 64 14481 v3.AddArg(y) 14482 v1.AddArg(v3) 14483 v.AddArg(v1) 14484 return true 14485 } 14486 } 14487 func rewriteValueARM64_OpRsh16x8_0(v *Value) bool { 14488 b := v.Block 14489 _ = b 14490 typ := &b.Func.Config.Types 14491 _ = typ 14492 // match: (Rsh16x8 x y) 14493 // cond: 14494 // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14495 for { 14496 _ = v.Args[1] 14497 x := v.Args[0] 14498 y := v.Args[1] 14499 v.reset(OpARM64SRA) 14500 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 14501 v0.AddArg(x) 14502 v.AddArg(v0) 14503 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14504 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14505 v2.AddArg(y) 14506 v1.AddArg(v2) 14507 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14508 v3.AuxInt = 63 14509 v1.AddArg(v3) 14510 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14511 v4.AuxInt = 64 14512 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14513 v5.AddArg(y) 14514 v4.AddArg(v5) 14515 v1.AddArg(v4) 14516 v.AddArg(v1) 14517 return true 14518 } 14519 } 14520 func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool { 14521 b := v.Block 14522 _ = b 14523 typ := &b.Func.Config.Types 14524 _ = typ 14525 // match: (Rsh32Ux16 <t> x y) 14526 // cond: 14527 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14528 for { 14529 t := v.Type 14530 _ = v.Args[1] 14531 x := v.Args[0] 14532 
y := v.Args[1] 14533 v.reset(OpARM64CSELULT) 14534 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14535 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14536 v1.AddArg(x) 14537 v0.AddArg(v1) 14538 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14539 v2.AddArg(y) 14540 v0.AddArg(v2) 14541 v.AddArg(v0) 14542 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14543 v3.AuxInt = 0 14544 v.AddArg(v3) 14545 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14546 v4.AuxInt = 64 14547 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14548 v5.AddArg(y) 14549 v4.AddArg(v5) 14550 v.AddArg(v4) 14551 return true 14552 } 14553 } 14554 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool { 14555 b := v.Block 14556 _ = b 14557 typ := &b.Func.Config.Types 14558 _ = typ 14559 // match: (Rsh32Ux32 <t> x y) 14560 // cond: 14561 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14562 for { 14563 t := v.Type 14564 _ = v.Args[1] 14565 x := v.Args[0] 14566 y := v.Args[1] 14567 v.reset(OpARM64CSELULT) 14568 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14569 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14570 v1.AddArg(x) 14571 v0.AddArg(v1) 14572 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14573 v2.AddArg(y) 14574 v0.AddArg(v2) 14575 v.AddArg(v0) 14576 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14577 v3.AuxInt = 0 14578 v.AddArg(v3) 14579 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14580 v4.AuxInt = 64 14581 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14582 v5.AddArg(y) 14583 v4.AddArg(v5) 14584 v.AddArg(v4) 14585 return true 14586 } 14587 } 14588 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 14589 b := v.Block 14590 _ = b 14591 typ := &b.Func.Config.Types 14592 _ = typ 14593 // match: (Rsh32Ux64 <t> x y) 14594 // cond: 14595 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y)) 14596 for { 14597 t := v.Type 14598 _ = 
v.Args[1] 14599 x := v.Args[0] 14600 y := v.Args[1] 14601 v.reset(OpARM64CSELULT) 14602 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14603 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14604 v1.AddArg(x) 14605 v0.AddArg(v1) 14606 v0.AddArg(y) 14607 v.AddArg(v0) 14608 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14609 v2.AuxInt = 0 14610 v.AddArg(v2) 14611 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14612 v3.AuxInt = 64 14613 v3.AddArg(y) 14614 v.AddArg(v3) 14615 return true 14616 } 14617 } 14618 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 14619 b := v.Block 14620 _ = b 14621 typ := &b.Func.Config.Types 14622 _ = typ 14623 // match: (Rsh32Ux8 <t> x y) 14624 // cond: 14625 // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14626 for { 14627 t := v.Type 14628 _ = v.Args[1] 14629 x := v.Args[0] 14630 y := v.Args[1] 14631 v.reset(OpARM64CSELULT) 14632 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14633 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14634 v1.AddArg(x) 14635 v0.AddArg(v1) 14636 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14637 v2.AddArg(y) 14638 v0.AddArg(v2) 14639 v.AddArg(v0) 14640 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14641 v3.AuxInt = 0 14642 v.AddArg(v3) 14643 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14644 v4.AuxInt = 64 14645 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14646 v5.AddArg(y) 14647 v4.AddArg(v5) 14648 v.AddArg(v4) 14649 return true 14650 } 14651 } 14652 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 14653 b := v.Block 14654 _ = b 14655 typ := &b.Func.Config.Types 14656 _ = typ 14657 // match: (Rsh32x16 x y) 14658 // cond: 14659 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14660 for { 14661 _ = v.Args[1] 14662 x := v.Args[0] 14663 y := v.Args[1] 14664 v.reset(OpARM64SRA) 14665 v0 := b.NewValue0(v.Pos, 
OpSignExt32to64, typ.Int64) 14666 v0.AddArg(x) 14667 v.AddArg(v0) 14668 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14669 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14670 v2.AddArg(y) 14671 v1.AddArg(v2) 14672 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14673 v3.AuxInt = 63 14674 v1.AddArg(v3) 14675 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14676 v4.AuxInt = 64 14677 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14678 v5.AddArg(y) 14679 v4.AddArg(v5) 14680 v1.AddArg(v4) 14681 v.AddArg(v1) 14682 return true 14683 } 14684 } 14685 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 14686 b := v.Block 14687 _ = b 14688 typ := &b.Func.Config.Types 14689 _ = typ 14690 // match: (Rsh32x32 x y) 14691 // cond: 14692 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14693 for { 14694 _ = v.Args[1] 14695 x := v.Args[0] 14696 y := v.Args[1] 14697 v.reset(OpARM64SRA) 14698 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 14699 v0.AddArg(x) 14700 v.AddArg(v0) 14701 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14702 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14703 v2.AddArg(y) 14704 v1.AddArg(v2) 14705 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14706 v3.AuxInt = 63 14707 v1.AddArg(v3) 14708 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14709 v4.AuxInt = 64 14710 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14711 v5.AddArg(y) 14712 v4.AddArg(v5) 14713 v1.AddArg(v4) 14714 v.AddArg(v1) 14715 return true 14716 } 14717 } 14718 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 14719 b := v.Block 14720 _ = b 14721 typ := &b.Func.Config.Types 14722 _ = typ 14723 // match: (Rsh32x64 x y) 14724 // cond: 14725 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y))) 14726 for { 14727 _ = v.Args[1] 14728 x := v.Args[0] 14729 y := v.Args[1] 14730 v.reset(OpARM64SRA) 
14731 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 14732 v0.AddArg(x) 14733 v.AddArg(v0) 14734 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14735 v1.AddArg(y) 14736 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14737 v2.AuxInt = 63 14738 v1.AddArg(v2) 14739 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14740 v3.AuxInt = 64 14741 v3.AddArg(y) 14742 v1.AddArg(v3) 14743 v.AddArg(v1) 14744 return true 14745 } 14746 } 14747 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 14748 b := v.Block 14749 _ = b 14750 typ := &b.Func.Config.Types 14751 _ = typ 14752 // match: (Rsh32x8 x y) 14753 // cond: 14754 // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14755 for { 14756 _ = v.Args[1] 14757 x := v.Args[0] 14758 y := v.Args[1] 14759 v.reset(OpARM64SRA) 14760 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 14761 v0.AddArg(x) 14762 v.AddArg(v0) 14763 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14764 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14765 v2.AddArg(y) 14766 v1.AddArg(v2) 14767 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14768 v3.AuxInt = 63 14769 v1.AddArg(v3) 14770 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14771 v4.AuxInt = 64 14772 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14773 v5.AddArg(y) 14774 v4.AddArg(v5) 14775 v1.AddArg(v4) 14776 v.AddArg(v1) 14777 return true 14778 } 14779 } 14780 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 14781 b := v.Block 14782 _ = b 14783 typ := &b.Func.Config.Types 14784 _ = typ 14785 // match: (Rsh64Ux16 <t> x y) 14786 // cond: 14787 // result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 14788 for { 14789 t := v.Type 14790 _ = v.Args[1] 14791 x := v.Args[0] 14792 y := v.Args[1] 14793 v.reset(OpARM64CSELULT) 14794 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14795 v0.AddArg(x) 14796 v1 := b.NewValue0(v.Pos, 
OpZeroExt16to64, typ.UInt64) 14797 v1.AddArg(y) 14798 v0.AddArg(v1) 14799 v.AddArg(v0) 14800 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14801 v2.AuxInt = 0 14802 v.AddArg(v2) 14803 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14804 v3.AuxInt = 64 14805 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14806 v4.AddArg(y) 14807 v3.AddArg(v4) 14808 v.AddArg(v3) 14809 return true 14810 } 14811 } 14812 func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 14813 b := v.Block 14814 _ = b 14815 typ := &b.Func.Config.Types 14816 _ = typ 14817 // match: (Rsh64Ux32 <t> x y) 14818 // cond: 14819 // result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 14820 for { 14821 t := v.Type 14822 _ = v.Args[1] 14823 x := v.Args[0] 14824 y := v.Args[1] 14825 v.reset(OpARM64CSELULT) 14826 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14827 v0.AddArg(x) 14828 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14829 v1.AddArg(y) 14830 v0.AddArg(v1) 14831 v.AddArg(v0) 14832 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14833 v2.AuxInt = 0 14834 v.AddArg(v2) 14835 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14836 v3.AuxInt = 64 14837 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14838 v4.AddArg(y) 14839 v3.AddArg(v4) 14840 v.AddArg(v3) 14841 return true 14842 } 14843 } 14844 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 14845 b := v.Block 14846 _ = b 14847 // match: (Rsh64Ux64 <t> x y) 14848 // cond: 14849 // result: (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)) 14850 for { 14851 t := v.Type 14852 _ = v.Args[1] 14853 x := v.Args[0] 14854 y := v.Args[1] 14855 v.reset(OpARM64CSELULT) 14856 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14857 v0.AddArg(x) 14858 v0.AddArg(y) 14859 v.AddArg(v0) 14860 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14861 v1.AuxInt = 0 14862 v.AddArg(v1) 14863 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14864 v2.AuxInt = 64 14865 v2.AddArg(y) 14866 
v.AddArg(v2) 14867 return true 14868 } 14869 } 14870 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool { 14871 b := v.Block 14872 _ = b 14873 typ := &b.Func.Config.Types 14874 _ = typ 14875 // match: (Rsh64Ux8 <t> x y) 14876 // cond: 14877 // result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 14878 for { 14879 t := v.Type 14880 _ = v.Args[1] 14881 x := v.Args[0] 14882 y := v.Args[1] 14883 v.reset(OpARM64CSELULT) 14884 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 14885 v0.AddArg(x) 14886 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14887 v1.AddArg(y) 14888 v0.AddArg(v1) 14889 v.AddArg(v0) 14890 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 14891 v2.AuxInt = 0 14892 v.AddArg(v2) 14893 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14894 v3.AuxInt = 64 14895 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 14896 v4.AddArg(y) 14897 v3.AddArg(v4) 14898 v.AddArg(v3) 14899 return true 14900 } 14901 } 14902 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 14903 b := v.Block 14904 _ = b 14905 typ := &b.Func.Config.Types 14906 _ = typ 14907 // match: (Rsh64x16 x y) 14908 // cond: 14909 // result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 14910 for { 14911 _ = v.Args[1] 14912 x := v.Args[0] 14913 y := v.Args[1] 14914 v.reset(OpARM64SRA) 14915 v.AddArg(x) 14916 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14917 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14918 v1.AddArg(y) 14919 v0.AddArg(v1) 14920 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14921 v2.AuxInt = 63 14922 v0.AddArg(v2) 14923 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14924 v3.AuxInt = 64 14925 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 14926 v4.AddArg(y) 14927 v3.AddArg(v4) 14928 v0.AddArg(v3) 14929 v.AddArg(v0) 14930 return true 14931 } 14932 } 14933 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 14934 b := v.Block 14935 _ = b 
14936 typ := &b.Func.Config.Types 14937 _ = typ 14938 // match: (Rsh64x32 x y) 14939 // cond: 14940 // result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 14941 for { 14942 _ = v.Args[1] 14943 x := v.Args[0] 14944 y := v.Args[1] 14945 v.reset(OpARM64SRA) 14946 v.AddArg(x) 14947 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14948 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14949 v1.AddArg(y) 14950 v0.AddArg(v1) 14951 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14952 v2.AuxInt = 63 14953 v0.AddArg(v2) 14954 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14955 v3.AuxInt = 64 14956 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 14957 v4.AddArg(y) 14958 v3.AddArg(v4) 14959 v0.AddArg(v3) 14960 v.AddArg(v0) 14961 return true 14962 } 14963 } 14964 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 14965 b := v.Block 14966 _ = b 14967 // match: (Rsh64x64 x y) 14968 // cond: 14969 // result: (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y))) 14970 for { 14971 _ = v.Args[1] 14972 x := v.Args[0] 14973 y := v.Args[1] 14974 v.reset(OpARM64SRA) 14975 v.AddArg(x) 14976 v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 14977 v0.AddArg(y) 14978 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 14979 v1.AuxInt = 63 14980 v0.AddArg(v1) 14981 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 14982 v2.AuxInt = 64 14983 v2.AddArg(y) 14984 v0.AddArg(v2) 14985 v.AddArg(v0) 14986 return true 14987 } 14988 } 14989 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 14990 b := v.Block 14991 _ = b 14992 typ := &b.Func.Config.Types 14993 _ = typ 14994 // match: (Rsh64x8 x y) 14995 // cond: 14996 // result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 14997 for { 14998 _ = v.Args[1] 14999 x := v.Args[0] 15000 y := v.Args[1] 15001 v.reset(OpARM64SRA) 15002 v.AddArg(x) 15003 v0 := b.NewValue0(v.Pos, 
OpARM64CSELULT, y.Type) 15004 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15005 v1.AddArg(y) 15006 v0.AddArg(v1) 15007 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15008 v2.AuxInt = 63 15009 v0.AddArg(v2) 15010 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15011 v3.AuxInt = 64 15012 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15013 v4.AddArg(y) 15014 v3.AddArg(v4) 15015 v0.AddArg(v3) 15016 v.AddArg(v0) 15017 return true 15018 } 15019 } 15020 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 15021 b := v.Block 15022 _ = b 15023 typ := &b.Func.Config.Types 15024 _ = typ 15025 // match: (Rsh8Ux16 <t> x y) 15026 // cond: 15027 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 15028 for { 15029 t := v.Type 15030 _ = v.Args[1] 15031 x := v.Args[0] 15032 y := v.Args[1] 15033 v.reset(OpARM64CSELULT) 15034 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 15035 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15036 v1.AddArg(x) 15037 v0.AddArg(v1) 15038 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15039 v2.AddArg(y) 15040 v0.AddArg(v2) 15041 v.AddArg(v0) 15042 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 15043 v3.AuxInt = 0 15044 v.AddArg(v3) 15045 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15046 v4.AuxInt = 64 15047 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15048 v5.AddArg(y) 15049 v4.AddArg(v5) 15050 v.AddArg(v4) 15051 return true 15052 } 15053 } 15054 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 15055 b := v.Block 15056 _ = b 15057 typ := &b.Func.Config.Types 15058 _ = typ 15059 // match: (Rsh8Ux32 <t> x y) 15060 // cond: 15061 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 15062 for { 15063 t := v.Type 15064 _ = v.Args[1] 15065 x := v.Args[0] 15066 y := v.Args[1] 15067 v.reset(OpARM64CSELULT) 15068 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 15069 v1 := 
b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15070 v1.AddArg(x) 15071 v0.AddArg(v1) 15072 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15073 v2.AddArg(y) 15074 v0.AddArg(v2) 15075 v.AddArg(v0) 15076 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 15077 v3.AuxInt = 0 15078 v.AddArg(v3) 15079 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15080 v4.AuxInt = 64 15081 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15082 v5.AddArg(y) 15083 v4.AddArg(v5) 15084 v.AddArg(v4) 15085 return true 15086 } 15087 } 15088 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 15089 b := v.Block 15090 _ = b 15091 typ := &b.Func.Config.Types 15092 _ = typ 15093 // match: (Rsh8Ux64 <t> x y) 15094 // cond: 15095 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y)) 15096 for { 15097 t := v.Type 15098 _ = v.Args[1] 15099 x := v.Args[0] 15100 y := v.Args[1] 15101 v.reset(OpARM64CSELULT) 15102 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 15103 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15104 v1.AddArg(x) 15105 v0.AddArg(v1) 15106 v0.AddArg(y) 15107 v.AddArg(v0) 15108 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 15109 v2.AuxInt = 0 15110 v.AddArg(v2) 15111 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15112 v3.AuxInt = 64 15113 v3.AddArg(y) 15114 v.AddArg(v3) 15115 return true 15116 } 15117 } 15118 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 15119 b := v.Block 15120 _ = b 15121 typ := &b.Func.Config.Types 15122 _ = typ 15123 // match: (Rsh8Ux8 <t> x y) 15124 // cond: 15125 // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 15126 for { 15127 t := v.Type 15128 _ = v.Args[1] 15129 x := v.Args[0] 15130 y := v.Args[1] 15131 v.reset(OpARM64CSELULT) 15132 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 15133 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15134 v1.AddArg(x) 15135 v0.AddArg(v1) 15136 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, 
typ.UInt64) 15137 v2.AddArg(y) 15138 v0.AddArg(v2) 15139 v.AddArg(v0) 15140 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t) 15141 v3.AuxInt = 0 15142 v.AddArg(v3) 15143 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15144 v4.AuxInt = 64 15145 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15146 v5.AddArg(y) 15147 v4.AddArg(v5) 15148 v.AddArg(v4) 15149 return true 15150 } 15151 } 15152 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 15153 b := v.Block 15154 _ = b 15155 typ := &b.Func.Config.Types 15156 _ = typ 15157 // match: (Rsh8x16 x y) 15158 // cond: 15159 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 15160 for { 15161 _ = v.Args[1] 15162 x := v.Args[0] 15163 y := v.Args[1] 15164 v.reset(OpARM64SRA) 15165 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15166 v0.AddArg(x) 15167 v.AddArg(v0) 15168 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15169 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15170 v2.AddArg(y) 15171 v1.AddArg(v2) 15172 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15173 v3.AuxInt = 63 15174 v1.AddArg(v3) 15175 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15176 v4.AuxInt = 64 15177 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 15178 v5.AddArg(y) 15179 v4.AddArg(v5) 15180 v1.AddArg(v4) 15181 v.AddArg(v1) 15182 return true 15183 } 15184 } 15185 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 15186 b := v.Block 15187 _ = b 15188 typ := &b.Func.Config.Types 15189 _ = typ 15190 // match: (Rsh8x32 x y) 15191 // cond: 15192 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 15193 for { 15194 _ = v.Args[1] 15195 x := v.Args[0] 15196 y := v.Args[1] 15197 v.reset(OpARM64SRA) 15198 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15199 v0.AddArg(x) 15200 v.AddArg(v0) 15201 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15202 
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15203 v2.AddArg(y) 15204 v1.AddArg(v2) 15205 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15206 v3.AuxInt = 63 15207 v1.AddArg(v3) 15208 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15209 v4.AuxInt = 64 15210 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 15211 v5.AddArg(y) 15212 v4.AddArg(v5) 15213 v1.AddArg(v4) 15214 v.AddArg(v1) 15215 return true 15216 } 15217 } 15218 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 15219 b := v.Block 15220 _ = b 15221 typ := &b.Func.Config.Types 15222 _ = typ 15223 // match: (Rsh8x64 x y) 15224 // cond: 15225 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y))) 15226 for { 15227 _ = v.Args[1] 15228 x := v.Args[0] 15229 y := v.Args[1] 15230 v.reset(OpARM64SRA) 15231 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15232 v0.AddArg(x) 15233 v.AddArg(v0) 15234 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15235 v1.AddArg(y) 15236 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type) 15237 v2.AuxInt = 63 15238 v1.AddArg(v2) 15239 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 15240 v3.AuxInt = 64 15241 v3.AddArg(y) 15242 v1.AddArg(v3) 15243 v.AddArg(v1) 15244 return true 15245 } 15246 } 15247 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 15248 b := v.Block 15249 _ = b 15250 typ := &b.Func.Config.Types 15251 _ = typ 15252 // match: (Rsh8x8 x y) 15253 // cond: 15254 // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 15255 for { 15256 _ = v.Args[1] 15257 x := v.Args[0] 15258 y := v.Args[1] 15259 v.reset(OpARM64SRA) 15260 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 15261 v0.AddArg(x) 15262 v.AddArg(v0) 15263 v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type) 15264 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 15265 v2.AddArg(y) 15266 v1.AddArg(v2) 15267 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, 
y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpSignExt16to32_0 lowers the generic SignExt16to32 op
// to the ARM64 sign-extending register move MOVHreg.
func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt16to64_0 lowers SignExt16to64 to MOVHreg
// (the same instruction handles 16->32 and 16->64 sign extension).
func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt32to64_0 lowers SignExt32to64 to MOVWreg.
func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to16_0 lowers SignExt8to16 to MOVBreg.
func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to32_0 lowers SignExt8to32 to MOVBreg.
func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to64_0 lowers SignExt8to64 to MOVBreg.
func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result:
(SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpSqrt_0 lowers the generic Sqrt op to the ARM64
// double-precision square-root instruction FSQRTD.
func rewriteValueARM64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (FSQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FSQRTD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpStaticCall_0 lowers a generic StaticCall to ARM64
// CALLstatic, carrying the argument width (AuxInt) and call target (Aux)
// through unchanged.
func rewriteValueARM64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpARM64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpStore_0 lowers a generic Store to the ARM64 store of
// matching width: MOVB/MOVH/MOVW/MOVDstore for 1/2/4/8-byte integer stores,
// FMOVS/FMOVDstore for 4/8-byte floating-point stores. The store width is
// taken from the type carried in v.Aux.
func rewriteValueARM64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (FMOVSstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpSub16_0 lowers Sub16 to ARM64 SUB.
func rewriteValueARM64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32_0 lowers Sub32 to ARM64 SUB.
func rewriteValueARM64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32F_0 lowers Sub32F to the single-precision
// floating-point subtract FSUBS.
func rewriteValueARM64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64_0 lowers Sub64 to ARM64 SUB.
func rewriteValueARM64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64F_0 lowers Sub64F to the double-precision
// floating-point subtract FSUBD.
func rewriteValueARM64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub8_0 lowers Sub8 to ARM64 SUB.
func rewriteValueARM64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSubPtr_0 lowers SubPtr to ARM64 SUB.
func rewriteValueARM64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpTrunc16to8_0 replaces Trunc16to8 with its argument:
// narrowing truncations are no-ops on ARM64 (the low bits are already in
// place), so the op becomes a Copy.
func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to16_0 replaces Trunc32to16 with a Copy of
// its argument (truncation is a no-op on ARM64).
func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to8_0 replaces Trunc32to8 with a Copy of
// its argument (truncation is a no-op on ARM64).
func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to16_0 replaces Trunc64to16 with a Copy of
// its argument (truncation is a no-op on ARM64).
func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to32_0 replaces Trunc64to32 with a Copy of
// its argument (truncation is a no-op on ARM64).
func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to8_0 replaces Trunc64to8 with a Copy of
// its argument (truncation is a no-op on ARM64).
func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpXor16_0 lowers Xor16 to ARM64 XOR.
func rewriteValueARM64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor32_0 lowers Xor32 to ARM64 XOR.
func rewriteValueARM64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor64_0(v *Value) bool {
	//
match: (Xor64 x y) 15700 // cond: 15701 // result: (XOR x y) 15702 for { 15703 _ = v.Args[1] 15704 x := v.Args[0] 15705 y := v.Args[1] 15706 v.reset(OpARM64XOR) 15707 v.AddArg(x) 15708 v.AddArg(y) 15709 return true 15710 } 15711 } 15712 func rewriteValueARM64_OpXor8_0(v *Value) bool { 15713 // match: (Xor8 x y) 15714 // cond: 15715 // result: (XOR x y) 15716 for { 15717 _ = v.Args[1] 15718 x := v.Args[0] 15719 y := v.Args[1] 15720 v.reset(OpARM64XOR) 15721 v.AddArg(x) 15722 v.AddArg(y) 15723 return true 15724 } 15725 } 15726 func rewriteValueARM64_OpZero_0(v *Value) bool { 15727 b := v.Block 15728 _ = b 15729 typ := &b.Func.Config.Types 15730 _ = typ 15731 // match: (Zero [0] _ mem) 15732 // cond: 15733 // result: mem 15734 for { 15735 if v.AuxInt != 0 { 15736 break 15737 } 15738 _ = v.Args[1] 15739 mem := v.Args[1] 15740 v.reset(OpCopy) 15741 v.Type = mem.Type 15742 v.AddArg(mem) 15743 return true 15744 } 15745 // match: (Zero [1] ptr mem) 15746 // cond: 15747 // result: (MOVBstore ptr (MOVDconst [0]) mem) 15748 for { 15749 if v.AuxInt != 1 { 15750 break 15751 } 15752 _ = v.Args[1] 15753 ptr := v.Args[0] 15754 mem := v.Args[1] 15755 v.reset(OpARM64MOVBstore) 15756 v.AddArg(ptr) 15757 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15758 v0.AuxInt = 0 15759 v.AddArg(v0) 15760 v.AddArg(mem) 15761 return true 15762 } 15763 // match: (Zero [2] ptr mem) 15764 // cond: 15765 // result: (MOVHstore ptr (MOVDconst [0]) mem) 15766 for { 15767 if v.AuxInt != 2 { 15768 break 15769 } 15770 _ = v.Args[1] 15771 ptr := v.Args[0] 15772 mem := v.Args[1] 15773 v.reset(OpARM64MOVHstore) 15774 v.AddArg(ptr) 15775 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15776 v0.AuxInt = 0 15777 v.AddArg(v0) 15778 v.AddArg(mem) 15779 return true 15780 } 15781 // match: (Zero [4] ptr mem) 15782 // cond: 15783 // result: (MOVWstore ptr (MOVDconst [0]) mem) 15784 for { 15785 if v.AuxInt != 4 { 15786 break 15787 } 15788 _ = v.Args[1] 15789 ptr := v.Args[0] 15790 mem := v.Args[1] 15791 
v.reset(OpARM64MOVWstore) 15792 v.AddArg(ptr) 15793 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15794 v0.AuxInt = 0 15795 v.AddArg(v0) 15796 v.AddArg(mem) 15797 return true 15798 } 15799 // match: (Zero [8] ptr mem) 15800 // cond: 15801 // result: (MOVDstore ptr (MOVDconst [0]) mem) 15802 for { 15803 if v.AuxInt != 8 { 15804 break 15805 } 15806 _ = v.Args[1] 15807 ptr := v.Args[0] 15808 mem := v.Args[1] 15809 v.reset(OpARM64MOVDstore) 15810 v.AddArg(ptr) 15811 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15812 v0.AuxInt = 0 15813 v.AddArg(v0) 15814 v.AddArg(mem) 15815 return true 15816 } 15817 // match: (Zero [3] ptr mem) 15818 // cond: 15819 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 15820 for { 15821 if v.AuxInt != 3 { 15822 break 15823 } 15824 _ = v.Args[1] 15825 ptr := v.Args[0] 15826 mem := v.Args[1] 15827 v.reset(OpARM64MOVBstore) 15828 v.AuxInt = 2 15829 v.AddArg(ptr) 15830 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15831 v0.AuxInt = 0 15832 v.AddArg(v0) 15833 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 15834 v1.AddArg(ptr) 15835 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15836 v2.AuxInt = 0 15837 v1.AddArg(v2) 15838 v1.AddArg(mem) 15839 v.AddArg(v1) 15840 return true 15841 } 15842 // match: (Zero [5] ptr mem) 15843 // cond: 15844 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15845 for { 15846 if v.AuxInt != 5 { 15847 break 15848 } 15849 _ = v.Args[1] 15850 ptr := v.Args[0] 15851 mem := v.Args[1] 15852 v.reset(OpARM64MOVBstore) 15853 v.AuxInt = 4 15854 v.AddArg(ptr) 15855 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15856 v0.AuxInt = 0 15857 v.AddArg(v0) 15858 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15859 v1.AddArg(ptr) 15860 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15861 v2.AuxInt = 0 15862 v1.AddArg(v2) 15863 v1.AddArg(mem) 15864 v.AddArg(v1) 15865 return true 15866 } 15867 // 
match: (Zero [6] ptr mem) 15868 // cond: 15869 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 15870 for { 15871 if v.AuxInt != 6 { 15872 break 15873 } 15874 _ = v.Args[1] 15875 ptr := v.Args[0] 15876 mem := v.Args[1] 15877 v.reset(OpARM64MOVHstore) 15878 v.AuxInt = 4 15879 v.AddArg(ptr) 15880 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15881 v0.AuxInt = 0 15882 v.AddArg(v0) 15883 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15884 v1.AddArg(ptr) 15885 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15886 v2.AuxInt = 0 15887 v1.AddArg(v2) 15888 v1.AddArg(mem) 15889 v.AddArg(v1) 15890 return true 15891 } 15892 // match: (Zero [7] ptr mem) 15893 // cond: 15894 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 15895 for { 15896 if v.AuxInt != 7 { 15897 break 15898 } 15899 _ = v.Args[1] 15900 ptr := v.Args[0] 15901 mem := v.Args[1] 15902 v.reset(OpARM64MOVBstore) 15903 v.AuxInt = 6 15904 v.AddArg(ptr) 15905 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15906 v0.AuxInt = 0 15907 v.AddArg(v0) 15908 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 15909 v1.AuxInt = 4 15910 v1.AddArg(ptr) 15911 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15912 v2.AuxInt = 0 15913 v1.AddArg(v2) 15914 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 15915 v3.AddArg(ptr) 15916 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15917 v4.AuxInt = 0 15918 v3.AddArg(v4) 15919 v3.AddArg(mem) 15920 v1.AddArg(v3) 15921 v.AddArg(v1) 15922 return true 15923 } 15924 // match: (Zero [9] ptr mem) 15925 // cond: 15926 // result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15927 for { 15928 if v.AuxInt != 9 { 15929 break 15930 } 15931 _ = v.Args[1] 15932 ptr := v.Args[0] 15933 mem := v.Args[1] 15934 v.reset(OpARM64MOVBstore) 15935 v.AuxInt = 8 15936 v.AddArg(ptr) 15937 v0 := b.NewValue0(v.Pos, 
OpARM64MOVDconst, typ.UInt64) 15938 v0.AuxInt = 0 15939 v.AddArg(v0) 15940 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 15941 v1.AddArg(ptr) 15942 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15943 v2.AuxInt = 0 15944 v1.AddArg(v2) 15945 v1.AddArg(mem) 15946 v.AddArg(v1) 15947 return true 15948 } 15949 return false 15950 } 15951 func rewriteValueARM64_OpZero_10(v *Value) bool { 15952 b := v.Block 15953 _ = b 15954 typ := &b.Func.Config.Types 15955 _ = typ 15956 // match: (Zero [10] ptr mem) 15957 // cond: 15958 // result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 15959 for { 15960 if v.AuxInt != 10 { 15961 break 15962 } 15963 _ = v.Args[1] 15964 ptr := v.Args[0] 15965 mem := v.Args[1] 15966 v.reset(OpARM64MOVHstore) 15967 v.AuxInt = 8 15968 v.AddArg(ptr) 15969 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15970 v0.AuxInt = 0 15971 v.AddArg(v0) 15972 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 15973 v1.AddArg(ptr) 15974 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15975 v2.AuxInt = 0 15976 v1.AddArg(v2) 15977 v1.AddArg(mem) 15978 v.AddArg(v1) 15979 return true 15980 } 15981 // match: (Zero [11] ptr mem) 15982 // cond: 15983 // result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 15984 for { 15985 if v.AuxInt != 11 { 15986 break 15987 } 15988 _ = v.Args[1] 15989 ptr := v.Args[0] 15990 mem := v.Args[1] 15991 v.reset(OpARM64MOVBstore) 15992 v.AuxInt = 10 15993 v.AddArg(ptr) 15994 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 15995 v0.AuxInt = 0 15996 v.AddArg(v0) 15997 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 15998 v1.AuxInt = 8 15999 v1.AddArg(ptr) 16000 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16001 v2.AuxInt = 0 16002 v1.AddArg(v2) 16003 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16004 v3.AddArg(ptr) 16005 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 
16006 v4.AuxInt = 0 16007 v3.AddArg(v4) 16008 v3.AddArg(mem) 16009 v1.AddArg(v3) 16010 v.AddArg(v1) 16011 return true 16012 } 16013 // match: (Zero [12] ptr mem) 16014 // cond: 16015 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 16016 for { 16017 if v.AuxInt != 12 { 16018 break 16019 } 16020 _ = v.Args[1] 16021 ptr := v.Args[0] 16022 mem := v.Args[1] 16023 v.reset(OpARM64MOVWstore) 16024 v.AuxInt = 8 16025 v.AddArg(ptr) 16026 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16027 v0.AuxInt = 0 16028 v.AddArg(v0) 16029 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16030 v1.AddArg(ptr) 16031 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16032 v2.AuxInt = 0 16033 v1.AddArg(v2) 16034 v1.AddArg(mem) 16035 v.AddArg(v1) 16036 return true 16037 } 16038 // match: (Zero [13] ptr mem) 16039 // cond: 16040 // result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 16041 for { 16042 if v.AuxInt != 13 { 16043 break 16044 } 16045 _ = v.Args[1] 16046 ptr := v.Args[0] 16047 mem := v.Args[1] 16048 v.reset(OpARM64MOVBstore) 16049 v.AuxInt = 12 16050 v.AddArg(ptr) 16051 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16052 v0.AuxInt = 0 16053 v.AddArg(v0) 16054 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16055 v1.AuxInt = 8 16056 v1.AddArg(ptr) 16057 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16058 v2.AuxInt = 0 16059 v1.AddArg(v2) 16060 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16061 v3.AddArg(ptr) 16062 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16063 v4.AuxInt = 0 16064 v3.AddArg(v4) 16065 v3.AddArg(mem) 16066 v1.AddArg(v3) 16067 v.AddArg(v1) 16068 return true 16069 } 16070 // match: (Zero [14] ptr mem) 16071 // cond: 16072 // result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 16073 for { 16074 if v.AuxInt != 14 { 16075 break 16076 } 
16077 _ = v.Args[1] 16078 ptr := v.Args[0] 16079 mem := v.Args[1] 16080 v.reset(OpARM64MOVHstore) 16081 v.AuxInt = 12 16082 v.AddArg(ptr) 16083 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16084 v0.AuxInt = 0 16085 v.AddArg(v0) 16086 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16087 v1.AuxInt = 8 16088 v1.AddArg(ptr) 16089 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16090 v2.AuxInt = 0 16091 v1.AddArg(v2) 16092 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16093 v3.AddArg(ptr) 16094 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16095 v4.AuxInt = 0 16096 v3.AddArg(v4) 16097 v3.AddArg(mem) 16098 v1.AddArg(v3) 16099 v.AddArg(v1) 16100 return true 16101 } 16102 // match: (Zero [15] ptr mem) 16103 // cond: 16104 // result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))) 16105 for { 16106 if v.AuxInt != 15 { 16107 break 16108 } 16109 _ = v.Args[1] 16110 ptr := v.Args[0] 16111 mem := v.Args[1] 16112 v.reset(OpARM64MOVBstore) 16113 v.AuxInt = 14 16114 v.AddArg(ptr) 16115 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16116 v0.AuxInt = 0 16117 v.AddArg(v0) 16118 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 16119 v1.AuxInt = 12 16120 v1.AddArg(ptr) 16121 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16122 v2.AuxInt = 0 16123 v1.AddArg(v2) 16124 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 16125 v3.AuxInt = 8 16126 v3.AddArg(ptr) 16127 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16128 v4.AuxInt = 0 16129 v3.AddArg(v4) 16130 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 16131 v5.AddArg(ptr) 16132 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16133 v6.AuxInt = 0 16134 v5.AddArg(v6) 16135 v5.AddArg(mem) 16136 v3.AddArg(v5) 16137 v1.AddArg(v3) 16138 v.AddArg(v1) 16139 return true 16140 } 16141 // match: (Zero [16] ptr mem) 16142 // cond: 16143 // result: 
(STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem) 16144 for { 16145 if v.AuxInt != 16 { 16146 break 16147 } 16148 _ = v.Args[1] 16149 ptr := v.Args[0] 16150 mem := v.Args[1] 16151 v.reset(OpARM64STP) 16152 v.AuxInt = 0 16153 v.AddArg(ptr) 16154 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16155 v0.AuxInt = 0 16156 v.AddArg(v0) 16157 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16158 v1.AuxInt = 0 16159 v.AddArg(v1) 16160 v.AddArg(mem) 16161 return true 16162 } 16163 // match: (Zero [32] ptr mem) 16164 // cond: 16165 // result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)) 16166 for { 16167 if v.AuxInt != 32 { 16168 break 16169 } 16170 _ = v.Args[1] 16171 ptr := v.Args[0] 16172 mem := v.Args[1] 16173 v.reset(OpARM64STP) 16174 v.AuxInt = 16 16175 v.AddArg(ptr) 16176 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16177 v0.AuxInt = 0 16178 v.AddArg(v0) 16179 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16180 v1.AuxInt = 0 16181 v.AddArg(v1) 16182 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 16183 v2.AuxInt = 0 16184 v2.AddArg(ptr) 16185 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16186 v3.AuxInt = 0 16187 v2.AddArg(v3) 16188 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16189 v4.AuxInt = 0 16190 v2.AddArg(v4) 16191 v2.AddArg(mem) 16192 v.AddArg(v2) 16193 return true 16194 } 16195 // match: (Zero [48] ptr mem) 16196 // cond: 16197 // result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))) 16198 for { 16199 if v.AuxInt != 48 { 16200 break 16201 } 16202 _ = v.Args[1] 16203 ptr := v.Args[0] 16204 mem := v.Args[1] 16205 v.reset(OpARM64STP) 16206 v.AuxInt = 32 16207 v.AddArg(ptr) 16208 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16209 v0.AuxInt = 0 16210 v.AddArg(v0) 16211 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16212 v1.AuxInt = 0 16213 
v.AddArg(v1) 16214 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 16215 v2.AuxInt = 16 16216 v2.AddArg(ptr) 16217 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16218 v3.AuxInt = 0 16219 v2.AddArg(v3) 16220 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16221 v4.AuxInt = 0 16222 v2.AddArg(v4) 16223 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 16224 v5.AuxInt = 0 16225 v5.AddArg(ptr) 16226 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16227 v6.AuxInt = 0 16228 v5.AddArg(v6) 16229 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16230 v7.AuxInt = 0 16231 v5.AddArg(v7) 16232 v5.AddArg(mem) 16233 v2.AddArg(v5) 16234 v.AddArg(v2) 16235 return true 16236 } 16237 // match: (Zero [64] ptr mem) 16238 // cond: 16239 // result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))) 16240 for { 16241 if v.AuxInt != 64 { 16242 break 16243 } 16244 _ = v.Args[1] 16245 ptr := v.Args[0] 16246 mem := v.Args[1] 16247 v.reset(OpARM64STP) 16248 v.AuxInt = 48 16249 v.AddArg(ptr) 16250 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16251 v0.AuxInt = 0 16252 v.AddArg(v0) 16253 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16254 v1.AuxInt = 0 16255 v.AddArg(v1) 16256 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 16257 v2.AuxInt = 32 16258 v2.AddArg(ptr) 16259 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16260 v3.AuxInt = 0 16261 v2.AddArg(v3) 16262 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16263 v4.AuxInt = 0 16264 v2.AddArg(v4) 16265 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 16266 v5.AuxInt = 16 16267 v5.AddArg(ptr) 16268 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16269 v6.AuxInt = 0 16270 v5.AddArg(v6) 16271 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16272 v7.AuxInt = 0 16273 v5.AddArg(v7) 16274 v8 := b.NewValue0(v.Pos, OpARM64STP, 
types.TypeMem) 16275 v8.AuxInt = 0 16276 v8.AddArg(ptr) 16277 v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16278 v9.AuxInt = 0 16279 v8.AddArg(v9) 16280 v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 16281 v10.AuxInt = 0 16282 v8.AddArg(v10) 16283 v8.AddArg(mem) 16284 v5.AddArg(v8) 16285 v2.AddArg(v5) 16286 v.AddArg(v2) 16287 return true 16288 } 16289 return false 16290 } 16291 func rewriteValueARM64_OpZero_20(v *Value) bool { 16292 b := v.Block 16293 _ = b 16294 config := b.Func.Config 16295 _ = config 16296 // match: (Zero [s] ptr mem) 16297 // cond: s%16 != 0 && s > 16 16298 // result: (Zero [s-s%16] (OffPtr <ptr.Type> ptr [s%16]) (Zero [s%16] ptr mem)) 16299 for { 16300 s := v.AuxInt 16301 _ = v.Args[1] 16302 ptr := v.Args[0] 16303 mem := v.Args[1] 16304 if !(s%16 != 0 && s > 16) { 16305 break 16306 } 16307 v.reset(OpZero) 16308 v.AuxInt = s - s%16 16309 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 16310 v0.AuxInt = s % 16 16311 v0.AddArg(ptr) 16312 v.AddArg(v0) 16313 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem) 16314 v1.AuxInt = s % 16 16315 v1.AddArg(ptr) 16316 v1.AddArg(mem) 16317 v.AddArg(v1) 16318 return true 16319 } 16320 // match: (Zero [s] ptr mem) 16321 // cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice 16322 // result: (DUFFZERO [4 * (64 - int64(s/16))] ptr mem) 16323 for { 16324 s := v.AuxInt 16325 _ = v.Args[1] 16326 ptr := v.Args[0] 16327 mem := v.Args[1] 16328 if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) { 16329 break 16330 } 16331 v.reset(OpARM64DUFFZERO) 16332 v.AuxInt = 4 * (64 - int64(s/16)) 16333 v.AddArg(ptr) 16334 v.AddArg(mem) 16335 return true 16336 } 16337 // match: (Zero [s] ptr mem) 16338 // cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) 16339 // result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem) 16340 for { 16341 s := v.AuxInt 16342 _ = v.Args[1] 16343 ptr := v.Args[0] 16344 mem := v.Args[1] 16345 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) { 16346 
break 16347 } 16348 v.reset(OpARM64LoweredZero) 16349 v.AddArg(ptr) 16350 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type) 16351 v0.AuxInt = s - 16 16352 v0.AddArg(ptr) 16353 v.AddArg(v0) 16354 v.AddArg(mem) 16355 return true 16356 } 16357 return false 16358 } 16359 func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool { 16360 // match: (ZeroExt16to32 x) 16361 // cond: 16362 // result: (MOVHUreg x) 16363 for { 16364 x := v.Args[0] 16365 v.reset(OpARM64MOVHUreg) 16366 v.AddArg(x) 16367 return true 16368 } 16369 } 16370 func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool { 16371 // match: (ZeroExt16to64 x) 16372 // cond: 16373 // result: (MOVHUreg x) 16374 for { 16375 x := v.Args[0] 16376 v.reset(OpARM64MOVHUreg) 16377 v.AddArg(x) 16378 return true 16379 } 16380 } 16381 func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool { 16382 // match: (ZeroExt32to64 x) 16383 // cond: 16384 // result: (MOVWUreg x) 16385 for { 16386 x := v.Args[0] 16387 v.reset(OpARM64MOVWUreg) 16388 v.AddArg(x) 16389 return true 16390 } 16391 } 16392 func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool { 16393 // match: (ZeroExt8to16 x) 16394 // cond: 16395 // result: (MOVBUreg x) 16396 for { 16397 x := v.Args[0] 16398 v.reset(OpARM64MOVBUreg) 16399 v.AddArg(x) 16400 return true 16401 } 16402 } 16403 func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool { 16404 // match: (ZeroExt8to32 x) 16405 // cond: 16406 // result: (MOVBUreg x) 16407 for { 16408 x := v.Args[0] 16409 v.reset(OpARM64MOVBUreg) 16410 v.AddArg(x) 16411 return true 16412 } 16413 } 16414 func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool { 16415 // match: (ZeroExt8to64 x) 16416 // cond: 16417 // result: (MOVBUreg x) 16418 for { 16419 x := v.Args[0] 16420 v.reset(OpARM64MOVBUreg) 16421 v.AddArg(x) 16422 return true 16423 } 16424 } 16425 func rewriteBlockARM64(b *Block) bool { 16426 config := b.Func.Config 16427 _ = config 16428 fe := b.Func.fe 16429 _ = fe 16430 typ := &config.Types 16431 _ = typ 16432 switch 
b.Kind { 16433 case BlockARM64EQ: 16434 // match: (EQ (CMPconst [0] x) yes no) 16435 // cond: 16436 // result: (Z x yes no) 16437 for { 16438 v := b.Control 16439 if v.Op != OpARM64CMPconst { 16440 break 16441 } 16442 if v.AuxInt != 0 { 16443 break 16444 } 16445 x := v.Args[0] 16446 b.Kind = BlockARM64Z 16447 b.SetControl(x) 16448 b.Aux = nil 16449 return true 16450 } 16451 // match: (EQ (CMPWconst [0] x) yes no) 16452 // cond: 16453 // result: (ZW x yes no) 16454 for { 16455 v := b.Control 16456 if v.Op != OpARM64CMPWconst { 16457 break 16458 } 16459 if v.AuxInt != 0 { 16460 break 16461 } 16462 x := v.Args[0] 16463 b.Kind = BlockARM64ZW 16464 b.SetControl(x) 16465 b.Aux = nil 16466 return true 16467 } 16468 // match: (EQ (FlagEQ) yes no) 16469 // cond: 16470 // result: (First nil yes no) 16471 for { 16472 v := b.Control 16473 if v.Op != OpARM64FlagEQ { 16474 break 16475 } 16476 b.Kind = BlockFirst 16477 b.SetControl(nil) 16478 b.Aux = nil 16479 return true 16480 } 16481 // match: (EQ (FlagLT_ULT) yes no) 16482 // cond: 16483 // result: (First nil no yes) 16484 for { 16485 v := b.Control 16486 if v.Op != OpARM64FlagLT_ULT { 16487 break 16488 } 16489 b.Kind = BlockFirst 16490 b.SetControl(nil) 16491 b.Aux = nil 16492 b.swapSuccessors() 16493 return true 16494 } 16495 // match: (EQ (FlagLT_UGT) yes no) 16496 // cond: 16497 // result: (First nil no yes) 16498 for { 16499 v := b.Control 16500 if v.Op != OpARM64FlagLT_UGT { 16501 break 16502 } 16503 b.Kind = BlockFirst 16504 b.SetControl(nil) 16505 b.Aux = nil 16506 b.swapSuccessors() 16507 return true 16508 } 16509 // match: (EQ (FlagGT_ULT) yes no) 16510 // cond: 16511 // result: (First nil no yes) 16512 for { 16513 v := b.Control 16514 if v.Op != OpARM64FlagGT_ULT { 16515 break 16516 } 16517 b.Kind = BlockFirst 16518 b.SetControl(nil) 16519 b.Aux = nil 16520 b.swapSuccessors() 16521 return true 16522 } 16523 // match: (EQ (FlagGT_UGT) yes no) 16524 // cond: 16525 // result: (First nil no yes) 16526 for { 16527 v := 
b.Control 16528 if v.Op != OpARM64FlagGT_UGT { 16529 break 16530 } 16531 b.Kind = BlockFirst 16532 b.SetControl(nil) 16533 b.Aux = nil 16534 b.swapSuccessors() 16535 return true 16536 } 16537 // match: (EQ (InvertFlags cmp) yes no) 16538 // cond: 16539 // result: (EQ cmp yes no) 16540 for { 16541 v := b.Control 16542 if v.Op != OpARM64InvertFlags { 16543 break 16544 } 16545 cmp := v.Args[0] 16546 b.Kind = BlockARM64EQ 16547 b.SetControl(cmp) 16548 b.Aux = nil 16549 return true 16550 } 16551 case BlockARM64GE: 16552 // match: (GE (FlagEQ) yes no) 16553 // cond: 16554 // result: (First nil yes no) 16555 for { 16556 v := b.Control 16557 if v.Op != OpARM64FlagEQ { 16558 break 16559 } 16560 b.Kind = BlockFirst 16561 b.SetControl(nil) 16562 b.Aux = nil 16563 return true 16564 } 16565 // match: (GE (FlagLT_ULT) yes no) 16566 // cond: 16567 // result: (First nil no yes) 16568 for { 16569 v := b.Control 16570 if v.Op != OpARM64FlagLT_ULT { 16571 break 16572 } 16573 b.Kind = BlockFirst 16574 b.SetControl(nil) 16575 b.Aux = nil 16576 b.swapSuccessors() 16577 return true 16578 } 16579 // match: (GE (FlagLT_UGT) yes no) 16580 // cond: 16581 // result: (First nil no yes) 16582 for { 16583 v := b.Control 16584 if v.Op != OpARM64FlagLT_UGT { 16585 break 16586 } 16587 b.Kind = BlockFirst 16588 b.SetControl(nil) 16589 b.Aux = nil 16590 b.swapSuccessors() 16591 return true 16592 } 16593 // match: (GE (FlagGT_ULT) yes no) 16594 // cond: 16595 // result: (First nil yes no) 16596 for { 16597 v := b.Control 16598 if v.Op != OpARM64FlagGT_ULT { 16599 break 16600 } 16601 b.Kind = BlockFirst 16602 b.SetControl(nil) 16603 b.Aux = nil 16604 return true 16605 } 16606 // match: (GE (FlagGT_UGT) yes no) 16607 // cond: 16608 // result: (First nil yes no) 16609 for { 16610 v := b.Control 16611 if v.Op != OpARM64FlagGT_UGT { 16612 break 16613 } 16614 b.Kind = BlockFirst 16615 b.SetControl(nil) 16616 b.Aux = nil 16617 return true 16618 } 16619 // match: (GE (InvertFlags cmp) yes no) 16620 // cond: 
16621 // result: (LE cmp yes no) 16622 for { 16623 v := b.Control 16624 if v.Op != OpARM64InvertFlags { 16625 break 16626 } 16627 cmp := v.Args[0] 16628 b.Kind = BlockARM64LE 16629 b.SetControl(cmp) 16630 b.Aux = nil 16631 return true 16632 } 16633 case BlockARM64GT: 16634 // match: (GT (FlagEQ) yes no) 16635 // cond: 16636 // result: (First nil no yes) 16637 for { 16638 v := b.Control 16639 if v.Op != OpARM64FlagEQ { 16640 break 16641 } 16642 b.Kind = BlockFirst 16643 b.SetControl(nil) 16644 b.Aux = nil 16645 b.swapSuccessors() 16646 return true 16647 } 16648 // match: (GT (FlagLT_ULT) yes no) 16649 // cond: 16650 // result: (First nil no yes) 16651 for { 16652 v := b.Control 16653 if v.Op != OpARM64FlagLT_ULT { 16654 break 16655 } 16656 b.Kind = BlockFirst 16657 b.SetControl(nil) 16658 b.Aux = nil 16659 b.swapSuccessors() 16660 return true 16661 } 16662 // match: (GT (FlagLT_UGT) yes no) 16663 // cond: 16664 // result: (First nil no yes) 16665 for { 16666 v := b.Control 16667 if v.Op != OpARM64FlagLT_UGT { 16668 break 16669 } 16670 b.Kind = BlockFirst 16671 b.SetControl(nil) 16672 b.Aux = nil 16673 b.swapSuccessors() 16674 return true 16675 } 16676 // match: (GT (FlagGT_ULT) yes no) 16677 // cond: 16678 // result: (First nil yes no) 16679 for { 16680 v := b.Control 16681 if v.Op != OpARM64FlagGT_ULT { 16682 break 16683 } 16684 b.Kind = BlockFirst 16685 b.SetControl(nil) 16686 b.Aux = nil 16687 return true 16688 } 16689 // match: (GT (FlagGT_UGT) yes no) 16690 // cond: 16691 // result: (First nil yes no) 16692 for { 16693 v := b.Control 16694 if v.Op != OpARM64FlagGT_UGT { 16695 break 16696 } 16697 b.Kind = BlockFirst 16698 b.SetControl(nil) 16699 b.Aux = nil 16700 return true 16701 } 16702 // match: (GT (InvertFlags cmp) yes no) 16703 // cond: 16704 // result: (LT cmp yes no) 16705 for { 16706 v := b.Control 16707 if v.Op != OpARM64InvertFlags { 16708 break 16709 } 16710 cmp := v.Args[0] 16711 b.Kind = BlockARM64LT 16712 b.SetControl(cmp) 16713 b.Aux = nil 16714 
return true 16715 } 16716 case BlockIf: 16717 // match: (If (Equal cc) yes no) 16718 // cond: 16719 // result: (EQ cc yes no) 16720 for { 16721 v := b.Control 16722 if v.Op != OpARM64Equal { 16723 break 16724 } 16725 cc := v.Args[0] 16726 b.Kind = BlockARM64EQ 16727 b.SetControl(cc) 16728 b.Aux = nil 16729 return true 16730 } 16731 // match: (If (NotEqual cc) yes no) 16732 // cond: 16733 // result: (NE cc yes no) 16734 for { 16735 v := b.Control 16736 if v.Op != OpARM64NotEqual { 16737 break 16738 } 16739 cc := v.Args[0] 16740 b.Kind = BlockARM64NE 16741 b.SetControl(cc) 16742 b.Aux = nil 16743 return true 16744 } 16745 // match: (If (LessThan cc) yes no) 16746 // cond: 16747 // result: (LT cc yes no) 16748 for { 16749 v := b.Control 16750 if v.Op != OpARM64LessThan { 16751 break 16752 } 16753 cc := v.Args[0] 16754 b.Kind = BlockARM64LT 16755 b.SetControl(cc) 16756 b.Aux = nil 16757 return true 16758 } 16759 // match: (If (LessThanU cc) yes no) 16760 // cond: 16761 // result: (ULT cc yes no) 16762 for { 16763 v := b.Control 16764 if v.Op != OpARM64LessThanU { 16765 break 16766 } 16767 cc := v.Args[0] 16768 b.Kind = BlockARM64ULT 16769 b.SetControl(cc) 16770 b.Aux = nil 16771 return true 16772 } 16773 // match: (If (LessEqual cc) yes no) 16774 // cond: 16775 // result: (LE cc yes no) 16776 for { 16777 v := b.Control 16778 if v.Op != OpARM64LessEqual { 16779 break 16780 } 16781 cc := v.Args[0] 16782 b.Kind = BlockARM64LE 16783 b.SetControl(cc) 16784 b.Aux = nil 16785 return true 16786 } 16787 // match: (If (LessEqualU cc) yes no) 16788 // cond: 16789 // result: (ULE cc yes no) 16790 for { 16791 v := b.Control 16792 if v.Op != OpARM64LessEqualU { 16793 break 16794 } 16795 cc := v.Args[0] 16796 b.Kind = BlockARM64ULE 16797 b.SetControl(cc) 16798 b.Aux = nil 16799 return true 16800 } 16801 // match: (If (GreaterThan cc) yes no) 16802 // cond: 16803 // result: (GT cc yes no) 16804 for { 16805 v := b.Control 16806 if v.Op != OpARM64GreaterThan { 16807 break 16808 } 16809 
cc := v.Args[0] 16810 b.Kind = BlockARM64GT 16811 b.SetControl(cc) 16812 b.Aux = nil 16813 return true 16814 } 16815 // match: (If (GreaterThanU cc) yes no) 16816 // cond: 16817 // result: (UGT cc yes no) 16818 for { 16819 v := b.Control 16820 if v.Op != OpARM64GreaterThanU { 16821 break 16822 } 16823 cc := v.Args[0] 16824 b.Kind = BlockARM64UGT 16825 b.SetControl(cc) 16826 b.Aux = nil 16827 return true 16828 } 16829 // match: (If (GreaterEqual cc) yes no) 16830 // cond: 16831 // result: (GE cc yes no) 16832 for { 16833 v := b.Control 16834 if v.Op != OpARM64GreaterEqual { 16835 break 16836 } 16837 cc := v.Args[0] 16838 b.Kind = BlockARM64GE 16839 b.SetControl(cc) 16840 b.Aux = nil 16841 return true 16842 } 16843 // match: (If (GreaterEqualU cc) yes no) 16844 // cond: 16845 // result: (UGE cc yes no) 16846 for { 16847 v := b.Control 16848 if v.Op != OpARM64GreaterEqualU { 16849 break 16850 } 16851 cc := v.Args[0] 16852 b.Kind = BlockARM64UGE 16853 b.SetControl(cc) 16854 b.Aux = nil 16855 return true 16856 } 16857 // match: (If cond yes no) 16858 // cond: 16859 // result: (NZ cond yes no) 16860 for { 16861 v := b.Control 16862 _ = v 16863 cond := b.Control 16864 b.Kind = BlockARM64NZ 16865 b.SetControl(cond) 16866 b.Aux = nil 16867 return true 16868 } 16869 case BlockARM64LE: 16870 // match: (LE (FlagEQ) yes no) 16871 // cond: 16872 // result: (First nil yes no) 16873 for { 16874 v := b.Control 16875 if v.Op != OpARM64FlagEQ { 16876 break 16877 } 16878 b.Kind = BlockFirst 16879 b.SetControl(nil) 16880 b.Aux = nil 16881 return true 16882 } 16883 // match: (LE (FlagLT_ULT) yes no) 16884 // cond: 16885 // result: (First nil yes no) 16886 for { 16887 v := b.Control 16888 if v.Op != OpARM64FlagLT_ULT { 16889 break 16890 } 16891 b.Kind = BlockFirst 16892 b.SetControl(nil) 16893 b.Aux = nil 16894 return true 16895 } 16896 // match: (LE (FlagLT_UGT) yes no) 16897 // cond: 16898 // result: (First nil yes no) 16899 for { 16900 v := b.Control 16901 if v.Op != OpARM64FlagLT_UGT 
{ 16902 break 16903 } 16904 b.Kind = BlockFirst 16905 b.SetControl(nil) 16906 b.Aux = nil 16907 return true 16908 } 16909 // match: (LE (FlagGT_ULT) yes no) 16910 // cond: 16911 // result: (First nil no yes) 16912 for { 16913 v := b.Control 16914 if v.Op != OpARM64FlagGT_ULT { 16915 break 16916 } 16917 b.Kind = BlockFirst 16918 b.SetControl(nil) 16919 b.Aux = nil 16920 b.swapSuccessors() 16921 return true 16922 } 16923 // match: (LE (FlagGT_UGT) yes no) 16924 // cond: 16925 // result: (First nil no yes) 16926 for { 16927 v := b.Control 16928 if v.Op != OpARM64FlagGT_UGT { 16929 break 16930 } 16931 b.Kind = BlockFirst 16932 b.SetControl(nil) 16933 b.Aux = nil 16934 b.swapSuccessors() 16935 return true 16936 } 16937 // match: (LE (InvertFlags cmp) yes no) 16938 // cond: 16939 // result: (GE cmp yes no) 16940 for { 16941 v := b.Control 16942 if v.Op != OpARM64InvertFlags { 16943 break 16944 } 16945 cmp := v.Args[0] 16946 b.Kind = BlockARM64GE 16947 b.SetControl(cmp) 16948 b.Aux = nil 16949 return true 16950 } 16951 case BlockARM64LT: 16952 // match: (LT (FlagEQ) yes no) 16953 // cond: 16954 // result: (First nil no yes) 16955 for { 16956 v := b.Control 16957 if v.Op != OpARM64FlagEQ { 16958 break 16959 } 16960 b.Kind = BlockFirst 16961 b.SetControl(nil) 16962 b.Aux = nil 16963 b.swapSuccessors() 16964 return true 16965 } 16966 // match: (LT (FlagLT_ULT) yes no) 16967 // cond: 16968 // result: (First nil yes no) 16969 for { 16970 v := b.Control 16971 if v.Op != OpARM64FlagLT_ULT { 16972 break 16973 } 16974 b.Kind = BlockFirst 16975 b.SetControl(nil) 16976 b.Aux = nil 16977 return true 16978 } 16979 // match: (LT (FlagLT_UGT) yes no) 16980 // cond: 16981 // result: (First nil yes no) 16982 for { 16983 v := b.Control 16984 if v.Op != OpARM64FlagLT_UGT { 16985 break 16986 } 16987 b.Kind = BlockFirst 16988 b.SetControl(nil) 16989 b.Aux = nil 16990 return true 16991 } 16992 // match: (LT (FlagGT_ULT) yes no) 16993 // cond: 16994 // result: (First nil no yes) 16995 for { 
16996 v := b.Control 16997 if v.Op != OpARM64FlagGT_ULT { 16998 break 16999 } 17000 b.Kind = BlockFirst 17001 b.SetControl(nil) 17002 b.Aux = nil 17003 b.swapSuccessors() 17004 return true 17005 } 17006 // match: (LT (FlagGT_UGT) yes no) 17007 // cond: 17008 // result: (First nil no yes) 17009 for { 17010 v := b.Control 17011 if v.Op != OpARM64FlagGT_UGT { 17012 break 17013 } 17014 b.Kind = BlockFirst 17015 b.SetControl(nil) 17016 b.Aux = nil 17017 b.swapSuccessors() 17018 return true 17019 } 17020 // match: (LT (InvertFlags cmp) yes no) 17021 // cond: 17022 // result: (GT cmp yes no) 17023 for { 17024 v := b.Control 17025 if v.Op != OpARM64InvertFlags { 17026 break 17027 } 17028 cmp := v.Args[0] 17029 b.Kind = BlockARM64GT 17030 b.SetControl(cmp) 17031 b.Aux = nil 17032 return true 17033 } 17034 case BlockARM64NE: 17035 // match: (NE (CMPconst [0] x) yes no) 17036 // cond: 17037 // result: (NZ x yes no) 17038 for { 17039 v := b.Control 17040 if v.Op != OpARM64CMPconst { 17041 break 17042 } 17043 if v.AuxInt != 0 { 17044 break 17045 } 17046 x := v.Args[0] 17047 b.Kind = BlockARM64NZ 17048 b.SetControl(x) 17049 b.Aux = nil 17050 return true 17051 } 17052 // match: (NE (CMPWconst [0] x) yes no) 17053 // cond: 17054 // result: (NZW x yes no) 17055 for { 17056 v := b.Control 17057 if v.Op != OpARM64CMPWconst { 17058 break 17059 } 17060 if v.AuxInt != 0 { 17061 break 17062 } 17063 x := v.Args[0] 17064 b.Kind = BlockARM64NZW 17065 b.SetControl(x) 17066 b.Aux = nil 17067 return true 17068 } 17069 // match: (NE (FlagEQ) yes no) 17070 // cond: 17071 // result: (First nil no yes) 17072 for { 17073 v := b.Control 17074 if v.Op != OpARM64FlagEQ { 17075 break 17076 } 17077 b.Kind = BlockFirst 17078 b.SetControl(nil) 17079 b.Aux = nil 17080 b.swapSuccessors() 17081 return true 17082 } 17083 // match: (NE (FlagLT_ULT) yes no) 17084 // cond: 17085 // result: (First nil yes no) 17086 for { 17087 v := b.Control 17088 if v.Op != OpARM64FlagLT_ULT { 17089 break 17090 } 17091 b.Kind = 
BlockFirst 17092 b.SetControl(nil) 17093 b.Aux = nil 17094 return true 17095 } 17096 // match: (NE (FlagLT_UGT) yes no) 17097 // cond: 17098 // result: (First nil yes no) 17099 for { 17100 v := b.Control 17101 if v.Op != OpARM64FlagLT_UGT { 17102 break 17103 } 17104 b.Kind = BlockFirst 17105 b.SetControl(nil) 17106 b.Aux = nil 17107 return true 17108 } 17109 // match: (NE (FlagGT_ULT) yes no) 17110 // cond: 17111 // result: (First nil yes no) 17112 for { 17113 v := b.Control 17114 if v.Op != OpARM64FlagGT_ULT { 17115 break 17116 } 17117 b.Kind = BlockFirst 17118 b.SetControl(nil) 17119 b.Aux = nil 17120 return true 17121 } 17122 // match: (NE (FlagGT_UGT) yes no) 17123 // cond: 17124 // result: (First nil yes no) 17125 for { 17126 v := b.Control 17127 if v.Op != OpARM64FlagGT_UGT { 17128 break 17129 } 17130 b.Kind = BlockFirst 17131 b.SetControl(nil) 17132 b.Aux = nil 17133 return true 17134 } 17135 // match: (NE (InvertFlags cmp) yes no) 17136 // cond: 17137 // result: (NE cmp yes no) 17138 for { 17139 v := b.Control 17140 if v.Op != OpARM64InvertFlags { 17141 break 17142 } 17143 cmp := v.Args[0] 17144 b.Kind = BlockARM64NE 17145 b.SetControl(cmp) 17146 b.Aux = nil 17147 return true 17148 } 17149 case BlockARM64NZ: 17150 // match: (NZ (Equal cc) yes no) 17151 // cond: 17152 // result: (EQ cc yes no) 17153 for { 17154 v := b.Control 17155 if v.Op != OpARM64Equal { 17156 break 17157 } 17158 cc := v.Args[0] 17159 b.Kind = BlockARM64EQ 17160 b.SetControl(cc) 17161 b.Aux = nil 17162 return true 17163 } 17164 // match: (NZ (NotEqual cc) yes no) 17165 // cond: 17166 // result: (NE cc yes no) 17167 for { 17168 v := b.Control 17169 if v.Op != OpARM64NotEqual { 17170 break 17171 } 17172 cc := v.Args[0] 17173 b.Kind = BlockARM64NE 17174 b.SetControl(cc) 17175 b.Aux = nil 17176 return true 17177 } 17178 // match: (NZ (LessThan cc) yes no) 17179 // cond: 17180 // result: (LT cc yes no) 17181 for { 17182 v := b.Control 17183 if v.Op != OpARM64LessThan { 17184 break 17185 } 
17186 cc := v.Args[0] 17187 b.Kind = BlockARM64LT 17188 b.SetControl(cc) 17189 b.Aux = nil 17190 return true 17191 } 17192 // match: (NZ (LessThanU cc) yes no) 17193 // cond: 17194 // result: (ULT cc yes no) 17195 for { 17196 v := b.Control 17197 if v.Op != OpARM64LessThanU { 17198 break 17199 } 17200 cc := v.Args[0] 17201 b.Kind = BlockARM64ULT 17202 b.SetControl(cc) 17203 b.Aux = nil 17204 return true 17205 } 17206 // match: (NZ (LessEqual cc) yes no) 17207 // cond: 17208 // result: (LE cc yes no) 17209 for { 17210 v := b.Control 17211 if v.Op != OpARM64LessEqual { 17212 break 17213 } 17214 cc := v.Args[0] 17215 b.Kind = BlockARM64LE 17216 b.SetControl(cc) 17217 b.Aux = nil 17218 return true 17219 } 17220 // match: (NZ (LessEqualU cc) yes no) 17221 // cond: 17222 // result: (ULE cc yes no) 17223 for { 17224 v := b.Control 17225 if v.Op != OpARM64LessEqualU { 17226 break 17227 } 17228 cc := v.Args[0] 17229 b.Kind = BlockARM64ULE 17230 b.SetControl(cc) 17231 b.Aux = nil 17232 return true 17233 } 17234 // match: (NZ (GreaterThan cc) yes no) 17235 // cond: 17236 // result: (GT cc yes no) 17237 for { 17238 v := b.Control 17239 if v.Op != OpARM64GreaterThan { 17240 break 17241 } 17242 cc := v.Args[0] 17243 b.Kind = BlockARM64GT 17244 b.SetControl(cc) 17245 b.Aux = nil 17246 return true 17247 } 17248 // match: (NZ (GreaterThanU cc) yes no) 17249 // cond: 17250 // result: (UGT cc yes no) 17251 for { 17252 v := b.Control 17253 if v.Op != OpARM64GreaterThanU { 17254 break 17255 } 17256 cc := v.Args[0] 17257 b.Kind = BlockARM64UGT 17258 b.SetControl(cc) 17259 b.Aux = nil 17260 return true 17261 } 17262 // match: (NZ (GreaterEqual cc) yes no) 17263 // cond: 17264 // result: (GE cc yes no) 17265 for { 17266 v := b.Control 17267 if v.Op != OpARM64GreaterEqual { 17268 break 17269 } 17270 cc := v.Args[0] 17271 b.Kind = BlockARM64GE 17272 b.SetControl(cc) 17273 b.Aux = nil 17274 return true 17275 } 17276 // match: (NZ (GreaterEqualU cc) yes no) 17277 // cond: 17278 // result: 
(UGE cc yes no) 17279 for { 17280 v := b.Control 17281 if v.Op != OpARM64GreaterEqualU { 17282 break 17283 } 17284 cc := v.Args[0] 17285 b.Kind = BlockARM64UGE 17286 b.SetControl(cc) 17287 b.Aux = nil 17288 return true 17289 } 17290 // match: (NZ (ANDconst [c] x) yes no) 17291 // cond: oneBit(c) 17292 // result: (TBNZ {ntz(c)} x yes no) 17293 for { 17294 v := b.Control 17295 if v.Op != OpARM64ANDconst { 17296 break 17297 } 17298 c := v.AuxInt 17299 x := v.Args[0] 17300 if !(oneBit(c)) { 17301 break 17302 } 17303 b.Kind = BlockARM64TBNZ 17304 b.SetControl(x) 17305 b.Aux = ntz(c) 17306 return true 17307 } 17308 // match: (NZ (MOVDconst [0]) yes no) 17309 // cond: 17310 // result: (First nil no yes) 17311 for { 17312 v := b.Control 17313 if v.Op != OpARM64MOVDconst { 17314 break 17315 } 17316 if v.AuxInt != 0 { 17317 break 17318 } 17319 b.Kind = BlockFirst 17320 b.SetControl(nil) 17321 b.Aux = nil 17322 b.swapSuccessors() 17323 return true 17324 } 17325 // match: (NZ (MOVDconst [c]) yes no) 17326 // cond: c != 0 17327 // result: (First nil yes no) 17328 for { 17329 v := b.Control 17330 if v.Op != OpARM64MOVDconst { 17331 break 17332 } 17333 c := v.AuxInt 17334 if !(c != 0) { 17335 break 17336 } 17337 b.Kind = BlockFirst 17338 b.SetControl(nil) 17339 b.Aux = nil 17340 return true 17341 } 17342 case BlockARM64NZW: 17343 // match: (NZW (ANDconst [c] x) yes no) 17344 // cond: oneBit(int64(uint32(c))) 17345 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 17346 for { 17347 v := b.Control 17348 if v.Op != OpARM64ANDconst { 17349 break 17350 } 17351 c := v.AuxInt 17352 x := v.Args[0] 17353 if !(oneBit(int64(uint32(c)))) { 17354 break 17355 } 17356 b.Kind = BlockARM64TBNZ 17357 b.SetControl(x) 17358 b.Aux = ntz(int64(uint32(c))) 17359 return true 17360 } 17361 // match: (NZW (MOVDconst [c]) yes no) 17362 // cond: int32(c) == 0 17363 // result: (First nil no yes) 17364 for { 17365 v := b.Control 17366 if v.Op != OpARM64MOVDconst { 17367 break 17368 } 17369 c := v.AuxInt 
17370 if !(int32(c) == 0) { 17371 break 17372 } 17373 b.Kind = BlockFirst 17374 b.SetControl(nil) 17375 b.Aux = nil 17376 b.swapSuccessors() 17377 return true 17378 } 17379 // match: (NZW (MOVDconst [c]) yes no) 17380 // cond: int32(c) != 0 17381 // result: (First nil yes no) 17382 for { 17383 v := b.Control 17384 if v.Op != OpARM64MOVDconst { 17385 break 17386 } 17387 c := v.AuxInt 17388 if !(int32(c) != 0) { 17389 break 17390 } 17391 b.Kind = BlockFirst 17392 b.SetControl(nil) 17393 b.Aux = nil 17394 return true 17395 } 17396 case BlockARM64UGE: 17397 // match: (UGE (FlagEQ) yes no) 17398 // cond: 17399 // result: (First nil yes no) 17400 for { 17401 v := b.Control 17402 if v.Op != OpARM64FlagEQ { 17403 break 17404 } 17405 b.Kind = BlockFirst 17406 b.SetControl(nil) 17407 b.Aux = nil 17408 return true 17409 } 17410 // match: (UGE (FlagLT_ULT) yes no) 17411 // cond: 17412 // result: (First nil no yes) 17413 for { 17414 v := b.Control 17415 if v.Op != OpARM64FlagLT_ULT { 17416 break 17417 } 17418 b.Kind = BlockFirst 17419 b.SetControl(nil) 17420 b.Aux = nil 17421 b.swapSuccessors() 17422 return true 17423 } 17424 // match: (UGE (FlagLT_UGT) yes no) 17425 // cond: 17426 // result: (First nil yes no) 17427 for { 17428 v := b.Control 17429 if v.Op != OpARM64FlagLT_UGT { 17430 break 17431 } 17432 b.Kind = BlockFirst 17433 b.SetControl(nil) 17434 b.Aux = nil 17435 return true 17436 } 17437 // match: (UGE (FlagGT_ULT) yes no) 17438 // cond: 17439 // result: (First nil no yes) 17440 for { 17441 v := b.Control 17442 if v.Op != OpARM64FlagGT_ULT { 17443 break 17444 } 17445 b.Kind = BlockFirst 17446 b.SetControl(nil) 17447 b.Aux = nil 17448 b.swapSuccessors() 17449 return true 17450 } 17451 // match: (UGE (FlagGT_UGT) yes no) 17452 // cond: 17453 // result: (First nil yes no) 17454 for { 17455 v := b.Control 17456 if v.Op != OpARM64FlagGT_UGT { 17457 break 17458 } 17459 b.Kind = BlockFirst 17460 b.SetControl(nil) 17461 b.Aux = nil 17462 return true 17463 } 17464 // match: 
(UGE (InvertFlags cmp) yes no) 17465 // cond: 17466 // result: (ULE cmp yes no) 17467 for { 17468 v := b.Control 17469 if v.Op != OpARM64InvertFlags { 17470 break 17471 } 17472 cmp := v.Args[0] 17473 b.Kind = BlockARM64ULE 17474 b.SetControl(cmp) 17475 b.Aux = nil 17476 return true 17477 } 17478 case BlockARM64UGT: 17479 // match: (UGT (FlagEQ) yes no) 17480 // cond: 17481 // result: (First nil no yes) 17482 for { 17483 v := b.Control 17484 if v.Op != OpARM64FlagEQ { 17485 break 17486 } 17487 b.Kind = BlockFirst 17488 b.SetControl(nil) 17489 b.Aux = nil 17490 b.swapSuccessors() 17491 return true 17492 } 17493 // match: (UGT (FlagLT_ULT) yes no) 17494 // cond: 17495 // result: (First nil no yes) 17496 for { 17497 v := b.Control 17498 if v.Op != OpARM64FlagLT_ULT { 17499 break 17500 } 17501 b.Kind = BlockFirst 17502 b.SetControl(nil) 17503 b.Aux = nil 17504 b.swapSuccessors() 17505 return true 17506 } 17507 // match: (UGT (FlagLT_UGT) yes no) 17508 // cond: 17509 // result: (First nil yes no) 17510 for { 17511 v := b.Control 17512 if v.Op != OpARM64FlagLT_UGT { 17513 break 17514 } 17515 b.Kind = BlockFirst 17516 b.SetControl(nil) 17517 b.Aux = nil 17518 return true 17519 } 17520 // match: (UGT (FlagGT_ULT) yes no) 17521 // cond: 17522 // result: (First nil no yes) 17523 for { 17524 v := b.Control 17525 if v.Op != OpARM64FlagGT_ULT { 17526 break 17527 } 17528 b.Kind = BlockFirst 17529 b.SetControl(nil) 17530 b.Aux = nil 17531 b.swapSuccessors() 17532 return true 17533 } 17534 // match: (UGT (FlagGT_UGT) yes no) 17535 // cond: 17536 // result: (First nil yes no) 17537 for { 17538 v := b.Control 17539 if v.Op != OpARM64FlagGT_UGT { 17540 break 17541 } 17542 b.Kind = BlockFirst 17543 b.SetControl(nil) 17544 b.Aux = nil 17545 return true 17546 } 17547 // match: (UGT (InvertFlags cmp) yes no) 17548 // cond: 17549 // result: (ULT cmp yes no) 17550 for { 17551 v := b.Control 17552 if v.Op != OpARM64InvertFlags { 17553 break 17554 } 17555 cmp := v.Args[0] 17556 b.Kind = 
BlockARM64ULT 17557 b.SetControl(cmp) 17558 b.Aux = nil 17559 return true 17560 } 17561 case BlockARM64ULE: 17562 // match: (ULE (FlagEQ) yes no) 17563 // cond: 17564 // result: (First nil yes no) 17565 for { 17566 v := b.Control 17567 if v.Op != OpARM64FlagEQ { 17568 break 17569 } 17570 b.Kind = BlockFirst 17571 b.SetControl(nil) 17572 b.Aux = nil 17573 return true 17574 } 17575 // match: (ULE (FlagLT_ULT) yes no) 17576 // cond: 17577 // result: (First nil yes no) 17578 for { 17579 v := b.Control 17580 if v.Op != OpARM64FlagLT_ULT { 17581 break 17582 } 17583 b.Kind = BlockFirst 17584 b.SetControl(nil) 17585 b.Aux = nil 17586 return true 17587 } 17588 // match: (ULE (FlagLT_UGT) yes no) 17589 // cond: 17590 // result: (First nil no yes) 17591 for { 17592 v := b.Control 17593 if v.Op != OpARM64FlagLT_UGT { 17594 break 17595 } 17596 b.Kind = BlockFirst 17597 b.SetControl(nil) 17598 b.Aux = nil 17599 b.swapSuccessors() 17600 return true 17601 } 17602 // match: (ULE (FlagGT_ULT) yes no) 17603 // cond: 17604 // result: (First nil yes no) 17605 for { 17606 v := b.Control 17607 if v.Op != OpARM64FlagGT_ULT { 17608 break 17609 } 17610 b.Kind = BlockFirst 17611 b.SetControl(nil) 17612 b.Aux = nil 17613 return true 17614 } 17615 // match: (ULE (FlagGT_UGT) yes no) 17616 // cond: 17617 // result: (First nil no yes) 17618 for { 17619 v := b.Control 17620 if v.Op != OpARM64FlagGT_UGT { 17621 break 17622 } 17623 b.Kind = BlockFirst 17624 b.SetControl(nil) 17625 b.Aux = nil 17626 b.swapSuccessors() 17627 return true 17628 } 17629 // match: (ULE (InvertFlags cmp) yes no) 17630 // cond: 17631 // result: (UGE cmp yes no) 17632 for { 17633 v := b.Control 17634 if v.Op != OpARM64InvertFlags { 17635 break 17636 } 17637 cmp := v.Args[0] 17638 b.Kind = BlockARM64UGE 17639 b.SetControl(cmp) 17640 b.Aux = nil 17641 return true 17642 } 17643 case BlockARM64ULT: 17644 // match: (ULT (FlagEQ) yes no) 17645 // cond: 17646 // result: (First nil no yes) 17647 for { 17648 v := b.Control 17649 if 
v.Op != OpARM64FlagEQ { 17650 break 17651 } 17652 b.Kind = BlockFirst 17653 b.SetControl(nil) 17654 b.Aux = nil 17655 b.swapSuccessors() 17656 return true 17657 } 17658 // match: (ULT (FlagLT_ULT) yes no) 17659 // cond: 17660 // result: (First nil yes no) 17661 for { 17662 v := b.Control 17663 if v.Op != OpARM64FlagLT_ULT { 17664 break 17665 } 17666 b.Kind = BlockFirst 17667 b.SetControl(nil) 17668 b.Aux = nil 17669 return true 17670 } 17671 // match: (ULT (FlagLT_UGT) yes no) 17672 // cond: 17673 // result: (First nil no yes) 17674 for { 17675 v := b.Control 17676 if v.Op != OpARM64FlagLT_UGT { 17677 break 17678 } 17679 b.Kind = BlockFirst 17680 b.SetControl(nil) 17681 b.Aux = nil 17682 b.swapSuccessors() 17683 return true 17684 } 17685 // match: (ULT (FlagGT_ULT) yes no) 17686 // cond: 17687 // result: (First nil yes no) 17688 for { 17689 v := b.Control 17690 if v.Op != OpARM64FlagGT_ULT { 17691 break 17692 } 17693 b.Kind = BlockFirst 17694 b.SetControl(nil) 17695 b.Aux = nil 17696 return true 17697 } 17698 // match: (ULT (FlagGT_UGT) yes no) 17699 // cond: 17700 // result: (First nil no yes) 17701 for { 17702 v := b.Control 17703 if v.Op != OpARM64FlagGT_UGT { 17704 break 17705 } 17706 b.Kind = BlockFirst 17707 b.SetControl(nil) 17708 b.Aux = nil 17709 b.swapSuccessors() 17710 return true 17711 } 17712 // match: (ULT (InvertFlags cmp) yes no) 17713 // cond: 17714 // result: (UGT cmp yes no) 17715 for { 17716 v := b.Control 17717 if v.Op != OpARM64InvertFlags { 17718 break 17719 } 17720 cmp := v.Args[0] 17721 b.Kind = BlockARM64UGT 17722 b.SetControl(cmp) 17723 b.Aux = nil 17724 return true 17725 } 17726 case BlockARM64Z: 17727 // match: (Z (ANDconst [c] x) yes no) 17728 // cond: oneBit(c) 17729 // result: (TBZ {ntz(c)} x yes no) 17730 for { 17731 v := b.Control 17732 if v.Op != OpARM64ANDconst { 17733 break 17734 } 17735 c := v.AuxInt 17736 x := v.Args[0] 17737 if !(oneBit(c)) { 17738 break 17739 } 17740 b.Kind = BlockARM64TBZ 17741 b.SetControl(x) 17742 b.Aux = 
ntz(c) 17743 return true 17744 } 17745 // match: (Z (MOVDconst [0]) yes no) 17746 // cond: 17747 // result: (First nil yes no) 17748 for { 17749 v := b.Control 17750 if v.Op != OpARM64MOVDconst { 17751 break 17752 } 17753 if v.AuxInt != 0 { 17754 break 17755 } 17756 b.Kind = BlockFirst 17757 b.SetControl(nil) 17758 b.Aux = nil 17759 return true 17760 } 17761 // match: (Z (MOVDconst [c]) yes no) 17762 // cond: c != 0 17763 // result: (First nil no yes) 17764 for { 17765 v := b.Control 17766 if v.Op != OpARM64MOVDconst { 17767 break 17768 } 17769 c := v.AuxInt 17770 if !(c != 0) { 17771 break 17772 } 17773 b.Kind = BlockFirst 17774 b.SetControl(nil) 17775 b.Aux = nil 17776 b.swapSuccessors() 17777 return true 17778 } 17779 case BlockARM64ZW: 17780 // match: (ZW (ANDconst [c] x) yes no) 17781 // cond: oneBit(int64(uint32(c))) 17782 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 17783 for { 17784 v := b.Control 17785 if v.Op != OpARM64ANDconst { 17786 break 17787 } 17788 c := v.AuxInt 17789 x := v.Args[0] 17790 if !(oneBit(int64(uint32(c)))) { 17791 break 17792 } 17793 b.Kind = BlockARM64TBZ 17794 b.SetControl(x) 17795 b.Aux = ntz(int64(uint32(c))) 17796 return true 17797 } 17798 // match: (ZW (MOVDconst [c]) yes no) 17799 // cond: int32(c) == 0 17800 // result: (First nil yes no) 17801 for { 17802 v := b.Control 17803 if v.Op != OpARM64MOVDconst { 17804 break 17805 } 17806 c := v.AuxInt 17807 if !(int32(c) == 0) { 17808 break 17809 } 17810 b.Kind = BlockFirst 17811 b.SetControl(nil) 17812 b.Aux = nil 17813 return true 17814 } 17815 // match: (ZW (MOVDconst [c]) yes no) 17816 // cond: int32(c) != 0 17817 // result: (First nil no yes) 17818 for { 17819 v := b.Control 17820 if v.Op != OpARM64MOVDconst { 17821 break 17822 } 17823 c := v.AuxInt 17824 if !(int32(c) != 0) { 17825 break 17826 } 17827 b.Kind = BlockFirst 17828 b.SetControl(nil) 17829 b.Aux = nil 17830 b.swapSuccessors() 17831 return true 17832 } 17833 } 17834 return false 17835 }